Compare commits

...

112 commits

Author SHA1 Message Date
zeppi
b62cdb35ed
test for fix es should 2022-03-24 19:26:51 -04:00
Jack Robison
e775aab89b
improve claim search test 2022-03-21 22:59:28 -04:00
Jack Robison
bb3337da5f
update imports 2022-03-21 22:58:49 -04:00
Jack Robison
47f7ab7bd6
update test 2022-03-16 00:06:56 -04:00
Jack Robison
2005cc2948
tests 2022-03-10 12:01:21 -05:00
Jack Robison
2e5c6a6d2a
import 2022-03-09 10:28:57 -05:00
Jack Robison
ca34d703a2
move lbry.wallet.server and lbry.schema to scribe 2022-03-08 20:40:09 -05:00
Jack Robison
af681d969c
move test_revertable.py to scribe 2022-03-07 10:56:28 -05:00
Jack Robison
45799bf330
remove imports to lbry.wallet.server 2022-03-07 10:56:28 -05:00
Jack Robison
f8bb89c8cd
move lbry.wallet.server.udp -> lbry.wallet.udp 2022-03-07 10:56:28 -05:00
Jack Robison
8faaf9f465
update orchstr8 2022-03-07 10:56:28 -05:00
Jack Robison
973ee4f08c
delete unused code 2022-02-22 13:10:08 -05:00
Jack Robison
b83360f3e2
executors 2022-02-22 13:10:08 -05:00
Jack Robison
dac1b82ea7
fix tests 2022-02-22 13:10:08 -05:00
Jack Robison
56f80cbcda
remove obsolete test_claim_search_as_reader_server 2022-02-22 13:10:08 -05:00
Jack Robison
28abd9c449
fix test teardown and setup 2022-02-22 13:10:08 -05:00
Jack Robison
8d86b0754c
set_default_executor 2022-02-22 13:10:08 -05:00
Jack Robison
bed3255b89
less noisy test log 2022-02-22 13:10:08 -05:00
Jack Robison
a17a31acf5
remove unused arg 2022-02-22 13:10:08 -05:00
Jack Robison
1f815cf2d2
cleanup mempool.py 2022-02-22 13:10:08 -05:00
Jack Robison
07ee73b653
update transaction_get_height 2022-02-22 13:10:08 -05:00
Jack Robison
7a9e8c6769
fix mempool notification 2022-02-22 13:10:08 -05:00
Jack Robison
a2901e4331
fix blocking_channel_ids and filtering_channel_ids cli args 2022-02-22 13:10:08 -05:00
Jack Robison
7f8268703c
update prometheus 2022-02-22 13:10:08 -05:00
Jack Robison
32d2208fd9
logging 2022-02-22 13:10:08 -05:00
Jack Robison
704ec9e553
add --reindex option to lbry-hub-elastic-sync 2022-02-22 13:10:08 -05:00
Jack Robison
e0f7066163
clean up claim producer 2022-02-22 13:10:08 -05:00
Jack Robison
32b26c9fa5
convert full scan iterators to range scans 2022-02-22 13:10:08 -05:00
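The range-scan change above is easy to picture with a short sketch. This is illustrative only, assuming the python rocksdb bindings' iterator API (`iterkeys()`/`seek()`), not scribe's actual code:

```python
def range_scan(db, prefix: bytes):
    """Yield only the keys under `prefix`, instead of filtering a full scan."""
    it = db.iterkeys()
    it.seek(prefix)  # jump straight to the first key >= prefix
    for key in it:
        if not key.startswith(prefix):
            break  # past the prefix range; the rest of the keyspace is never touched
        yield key
```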
Jack Robison
0d9d576436
add cache_size attribute to prefix classes to set the rocksdb lru cache size
-updates rocksdb column families to use custom sized `block_cache` (an lru cache) in a `BlockBasedTableFactory`
-lowers the default max open files to 64
2022-02-22 13:10:08 -05:00
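A minimal sketch of what this commit message describes, using the public python rocksdb API; the function name and default size here are illustrative, not scribe's actual code:

```python
import rocksdb

def make_options(cache_size: int = 64 * 1024 * 1024) -> rocksdb.Options:
    opts = rocksdb.Options(create_if_missing=True)
    # block_cache is an LRU cache attached via a BlockBasedTableFactory,
    # sized per column family as the commit message describes
    opts.table_factory = rocksdb.BlockBasedTableFactory(
        block_cache=rocksdb.LRUCache(cache_size)
    )
    opts.max_open_files = 64  # the commit lowers the default max open files to 64
    return opts
```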
Jack Robison
e6c275f86e
remove unused closed attribute 2022-02-22 13:10:08 -05:00
Jack Robison
937adbf439
add estimate_num_keys to prefix interface 2022-02-22 13:10:08 -05:00
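rocksdb already tracks an approximate key count as a built-in database property, so `estimate_num_keys` can plausibly be a thin wrapper like the sketch below; the actual method on scribe's prefix classes may differ:

```python
def estimate_num_keys(db) -> int:
    # 'rocksdb.estimate-num-keys' is a built-in rocksdb property; the value is approximate
    return int(db.get_property(b'rocksdb.estimate-num-keys'))
```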
Jack Robison
6a5ff0636c
debug 2022-02-22 13:10:08 -05:00
Jack Robison
888d47f88b
fix test 2022-02-22 13:10:08 -05:00
Jack Robison
d7e50b269f
fix test 2022-02-22 13:10:08 -05:00
Jack Robison
46ce175481
fix tests 2022-02-22 13:10:08 -05:00
Jack Robison
6b2d4175be
fix reorg notifications 2022-02-22 13:10:08 -05:00
Jack Robison
16bfb8589b
merge conflicts 2022-02-22 13:10:08 -05:00
Jack Robison
a4bb4db8dd
rename PrefixDB class 2022-02-22 13:10:08 -05:00
Jack Robison
de1e2d0e3b
fix column family init 2022-02-22 13:10:08 -05:00
Jack Robison
a4880c1cf0
flush in advance/rollback methods 2022-02-22 13:10:08 -05:00
Jack Robison
28f25538a3
remove unused attributes 2022-02-22 13:10:08 -05:00
Jack Robison
83c8576b3f
fix tests 2022-02-22 13:10:08 -05:00
Jack Robison
6fc909ea41
handle mempool race errors from lbcd 2022-02-22 13:10:08 -05:00
Jack Robison
c17544d8ef
fix mempool race condition in hub db writer 2022-02-22 13:10:08 -05:00
Jack Robison
7c46cc0805
fix lower bound 2022-02-22 13:10:08 -05:00
Jack Robison
fb4dc8342a
fix tests 2022-02-22 13:10:08 -05:00
Jack Robison
df91f4754a
comment out part of a resolve test until lbcwallet issue is fixed 2022-02-22 13:10:08 -05:00
Jack Robison
7d8bc38cb9
fix iterator lower bound 2022-02-22 13:10:08 -05:00
Jeffrey Picard
a319595f37
Fix a test? 2022-02-22 13:10:08 -05:00
Jeffrey Picard
31312af517
Small changes and fixes for hub refactor 2022-02-22 13:10:08 -05:00
Jack Robison
95ec1f3af4
fix test 2022-02-22 13:10:08 -05:00
Jack Robison
b093aa3911
fix another merge conflict 2022-02-22 13:10:08 -05:00
Jack Robison
7bd157ef17
fix more merge conflicts 2022-02-22 13:10:08 -05:00
Jack Robison
7f67cbfb40
fix merge conflict 2022-02-22 13:10:08 -05:00
Jeffrey Picard
2ea48bc8c2
skip segwit test 2022-02-22 13:09:57 -05:00
Jeffrey Picard
8d42b375a0
fix ssl connections and add new docker file 2022-02-22 13:09:57 -05:00
Lex Berezhny
c2acceaed5
change VERBOSITY to INFO 2022-02-22 13:09:57 -05:00
Lex Berezhny
a2db18010b
rebasing fixes 2022-02-22 13:09:57 -05:00
Victor Shyba
545b7c33b1
bump lbcd to valid latest version 2022-02-22 13:09:57 -05:00
Brannon King
07d584133e
get better error on RPC failure 2022-02-22 13:09:57 -05:00
Brannon King
a10eb30771
fixing stalls in tests 2022-02-22 13:09:57 -05:00
Brannon King
98e264f4cd
fix for the send-to-address timeout in asyncSetup 2022-02-22 13:09:57 -05:00
Roy Lee
083d6a3bc3
Update lbcd and lbcwallet versions 2022-02-22 13:09:57 -05:00
Brannon King
94e87f99d8
fixed some tests; made them not timeout waiting for tx confirmation 2022-02-22 13:09:57 -05:00
Roy Lee
fc4114621c
update lbcd to v0.22.100-beta-rc5 2022-02-22 13:09:57 -05:00
Victor Shyba
63bd6f5792
skip test_segwit while we take a look why lbcwallet rejects the tx 2022-02-22 13:09:57 -05:00
Victor Shyba
0c86ed924b
ignore the config line warning 2022-02-22 13:09:57 -05:00
Victor Shyba
4a93b906d7
add comment for temporary lbcd edge case 2022-02-22 13:09:57 -05:00
Victor Shyba
b727d2815f
skip the test that uses claimname/updateclaim for now 2022-02-22 13:09:57 -05:00
Victor Shyba
bad8ae7832
separate stdout/stderr from lbcctl on tests 2022-02-22 13:09:57 -05:00
Roy Lee
cffe895d22
update lbcd and lbcwallet veresion 2022-02-22 13:09:57 -05:00
Brannon King
ca4fec272a
fixing a variety of broken tests, updated getclaimbyid usage 2022-02-22 13:09:57 -05:00
Roy Lee
66419f1aa6
test: lbcd don't have a dedicated mempool.dat 2022-02-22 13:09:57 -05:00
Roy Lee
a13735769b
test: update lbcd to avoid txn being rejected due to munimum fee 2022-02-22 13:09:57 -05:00
Roy Lee
749f72a8c5
HACK: temporary hack to move things along 2022-02-22 13:09:57 -05:00
Roy Lee
e5f124fe68
add a rocksdb setup sanity check 2022-02-22 13:09:57 -05:00
Roy Lee
88ed67a5b3
test: migrate from lbrycrd to lbcd/lbcwallet 2022-02-22 13:09:57 -05:00
Roy Lee
d5598462b6
test: support generatetoaddress RPC 2022-02-22 13:09:57 -05:00
Roy Lee
31c60e167a
test: support walletpassphrase RPC 2022-02-22 13:09:57 -05:00
Roy Lee
fe04bfa10a
test: getnewaddress RPC takes account name 2022-02-22 13:09:57 -05:00
Roy Lee
c15bedfb6d
test: update some RPC arguments to int type 2022-02-22 13:09:57 -05:00
Roy Lee
0ff62495c7
test: setup loggers first 2022-02-22 13:09:57 -05:00
Roy Lee
b4be712a50
Update log level from WARN to INFO 2022-02-22 13:09:57 -05:00
Jack Robison
faa43fc20e
use rocksdb instead of leveldb
-dont use block processor directly from session manager
2022-02-22 13:09:57 -05:00
Jack Robison
f7deaa3303
remove print from test, uncomment a test assert 2022-02-22 13:09:57 -05:00
Jack Robison
d7ecde7040
drop ES_MODE hub setting, rename hub entry points
`lbry-hub-writer` - keeps an up-to-date database
`lbry-hub-server` - replies to clients with data from a read only snapshot of the db
`lbry-hub-elastic-sync` - watches for changes to the db and updates elasticsearch accordingly
2022-02-22 13:09:57 -05:00
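For illustration, entry points like these are typically declared as console_scripts in setup.py; the module paths below are hypothetical, only the three command names come from the commit message:

```python
from setuptools import setup

setup(
    name='lbry',  # fragment for illustration; other arguments omitted
    entry_points={
        'console_scripts': [
            'lbry-hub-writer=lbry.wallet.server.cli:run_writer',        # hypothetical path
            'lbry-hub-server=lbry.wallet.server.cli:run_server',        # hypothetical path
            'lbry-hub-elastic-sync=lbry.wallet.server.cli:run_es_sync',  # hypothetical path
        ],
    },
)
```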
Jack Robison
e2a75758f8
delete unused code 2022-02-22 13:09:57 -05:00
Jack Robison
f449cf61ab
update mempool in thread 2022-02-22 13:09:57 -05:00
Jack Robison
04db81e954
fix tests 2022-02-22 13:09:57 -05:00
Jack Robison
b49c9fd050
fix filtering/blocking 2022-02-22 13:09:57 -05:00
Jack Robison
3ff2bcf913
pylint 2022-02-22 13:09:57 -05:00
Jack Robison
556056c60d
tests 2022-02-22 13:09:57 -05:00
Jack Robison
3c03fff380
logging, update lbry-rocksdb requirement 2022-02-22 13:09:57 -05:00
Jack Robison
b1441d4247
wait for writing to finish before closing the db 2022-02-22 13:09:57 -05:00
Jack Robison
81458b75e4
fix unwinding readers during reorg 2022-02-22 13:09:57 -05:00
Jack Robison
f0f8ef044b
reduce hub writer polling delay 2022-02-22 13:09:57 -05:00
Jack Robison
47305e7446
executors 2022-02-22 13:09:57 -05:00
Jack Robison
ba6b985d71
cleanup debug logging 2022-02-22 13:09:57 -05:00
Jack Robison
49802b39cb
rocksdb column families 2022-02-22 13:09:57 -05:00
Jack Robison
46bcc5d725
executors for each reader, fix shutdown 2022-02-22 13:09:57 -05:00
Jack Robison
98f8fd0556
tests 2022-02-22 13:09:57 -05:00
Jack Robison
c0ce27ccf3
es sync notifier 2022-02-22 13:09:57 -05:00
Jack Robison
cfae30a364
reader/writer move from cli 2022-02-22 13:09:57 -05:00
Jack Robison
53e3828965
remove redundant undo cleanup, don't delete historic touched_or_deleted 2022-02-22 13:09:57 -05:00
Jack Robison
4f16f1c829
imports, impove errors 2022-02-22 13:09:57 -05:00
Jack Robison
478bd0510b
es writer 2022-02-22 13:09:57 -05:00
Jack Robison
499ee74dfc
add chain reader and reader server, new mempool, update block processor 2022-02-22 13:09:57 -05:00
Jack Robison
d3da442727
update undo key to include the block hash 2022-02-22 13:09:57 -05:00
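A sketch of why a block hash belongs in the undo key: keyed by height alone, undo data for an orphaned block would collide with the replacement block at the same height during a reorg. The prefix and packing below are hypothetical, not scribe's actual layout:

```python
import struct

def undo_key(height: int, block_hash: bytes) -> bytes:
    # hypothetical 'u' prefix; big-endian height keeps keys sorted by height
    return b'u' + struct.pack('>Q', height) + block_hash
```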
Jack Robison
358fa21eaf
move Prefetcher to own file 2022-02-22 13:09:57 -05:00
Jack Robison
20f35d02fa
move HubDB, delete leveldb.py 2022-02-22 13:09:57 -05:00
Jack Robison
77e64ef028
add mempool, trending, and touched address indexes to the hub db 2022-02-22 13:09:57 -05:00
Jack Robison
0a71e2ff91
use rocksdb instead of leveldb
-dont use block processor directly from session manager
2022-02-22 13:09:57 -05:00
110 changed files with 1137 additions and 22866 deletions


@@ -15,7 +15,6 @@ RUN apt-get update && \
       build-essential \
       automake libtool \
       pkg-config \
-      libleveldb-dev \
       python3.7 \
       python3-dev \
       python3-pip \


@@ -0,0 +1,48 @@
# FROM debian:10-slim
FROM python:3.7.12-slim-buster
ARG user=lbry
ARG db_dir=/database
ARG projects_dir=/home/$user
ARG DOCKER_TAG
ARG DOCKER_COMMIT=docker
ENV DOCKER_TAG=$DOCKER_TAG DOCKER_COMMIT=$DOCKER_COMMIT
RUN apt-get update && \
    apt-get -y --no-install-recommends install \
      wget \
      tar unzip \
      build-essential \
      automake libtool \
      pkg-config
RUN pip install uvloop
RUN groupadd -g 999 $user && useradd -m -u 999 -g $user $user
RUN mkdir -p $db_dir
RUN chown -R $user:$user $db_dir
COPY . $projects_dir
RUN chown -R $user:$user $projects_dir
USER $user
WORKDIR $projects_dir
RUN make install
RUN python3 docker/set_build.py
RUN rm ~/.cache -rf
# entry point
ARG host=0.0.0.0
ARG tcp_port=50001
ARG daemon_url=http://lbry:lbry@192.99.151.178:9245/
VOLUME $db_dir
ENV TCP_PORT=$tcp_port
ENV HOST=$host
ENV DAEMON_URL=$daemon_url
ENV DB_DIRECTORY=$db_dir
ENV MAX_SESSIONS=1000000000
ENV MAX_SEND=1000000000000000000
ENV EVENT_LOOP_POLICY=uvloop
COPY ./docker/wallet_server_entrypoint.sh /entrypoint.sh
ENTRYPOINT ["/entrypoint.sh"]


@@ -20,6 +20,14 @@ if [[ -n "$SNAPSHOT_URL" ]] && [[ ! -f /database/lbry-leveldb ]]; then
   rm "$filename"
 fi

-/home/lbry/.local/bin/lbry-hub-elastic-sync
-echo 'starting server'
-/home/lbry/.local/bin/lbry-hub "$@"
+if [ -z "$HUB_COMMAND" ]; then
+  echo "HUB_COMMAND env variable must be writer, reader, or es_sync"
+  exit 1
+fi
+
+case "$HUB_COMMAND" in
+  writer ) /home/lbry/.local/bin/lbry-hub-writer "$@" ;;
+  reader ) /home/lbry/.local/bin/lbry-hub-server "$@" ;;
+  es_sync ) /home/lbry/.local/bin/lbry-hub-elastic-sync ;;
+  * ) echo "HUB_COMMAND env variable must be writer, reader, or es_sync" && exit 1 ;;
+esac


@@ -27,7 +27,7 @@ from lbry.wallet import (
 )
 from lbry.wallet.dewies import dewies_to_lbc, lbc_to_dewies, dict_values_to_lbc
 from lbry.wallet.constants import TXO_TYPES, CLAIM_TYPE_NAMES
-from lbry.wallet.bip32 import PrivateKey
+from scribe.schema.bip32 import PrivateKey
 from lbry import utils
 from lbry.conf import Config, Setting, NOT_SET
@@ -51,10 +51,9 @@ from lbry.extras.daemon.json_response_encoder import JSONResponseEncoder
 from lbry.extras.daemon.undecorated import undecorated
 from lbry.extras.daemon.security import ensure_request_allowed
 from lbry.file_analysis import VideoFileAnalyzer
-from lbry.schema.claim import Claim
-from lbry.schema.url import URL, normalize_name
-from lbry.wallet.server.db.elasticsearch.constants import RANGE_FIELDS, REPLACEMENTS
-MY_RANGE_FIELDS = RANGE_FIELDS - {"limit_claims_per_channel"}
+from scribe.schema.claim import Claim
+from scribe.schema.url import URL, normalize_name

 if typing.TYPE_CHECKING:
     from lbry.blob.blob_manager import BlobManager
@@ -67,6 +66,29 @@ if typing.TYPE_CHECKING:

 log = logging.getLogger(__name__)

+RANGE_FIELDS = {
+    'height', 'creation_height', 'activation_height', 'expiration_height',
+    'timestamp', 'creation_timestamp', 'duration', 'release_time', 'fee_amount',
+    'tx_position', 'repost_count', 'limit_claims_per_channel',
+    'amount', 'effective_amount', 'support_amount',
+    'trending_score', 'censor_type', 'tx_num'
+}
+
+MY_RANGE_FIELDS = RANGE_FIELDS - {"limit_claims_per_channel"}
+
+REPLACEMENTS = {
+    'claim_name': 'normalized_name',
+    'name': 'normalized_name',
+    'txid': 'tx_id',
+    'nout': 'tx_nout',
+    'trending_group': 'trending_score',
+    'trending_mixed': 'trending_score',
+    'trending_global': 'trending_score',
+    'trending_local': 'trending_score',
+    'reposted': 'repost_count',
+    'stream_types': 'stream_type',
+    'media_types': 'media_type',
+    'valid_channel_signature': 'is_signature_valid'
+}
+
 def is_transactional_function(name):
     for action in ('create', 'update', 'abandon', 'send', 'fund'):


@@ -6,11 +6,11 @@ from json import JSONEncoder
 from google.protobuf.message import DecodeError

-from lbry.schema.claim import Claim
-from lbry.schema.support import Support
+from scribe.schema.claim import Claim
+from scribe.schema.support import Support
+from scribe.schema.bip32 import PublicKey
 from lbry.torrent.torrent_manager import TorrentSource
 from lbry.wallet import Wallet, Ledger, Account, Transaction, Output
-from lbry.wallet.bip32 import PublicKey
 from lbry.wallet.dewies import dewies_to_lbc
 from lbry.stream.managed_stream import ManagedStream


@@ -3,7 +3,7 @@ import os
 import json
 import logging
 from binascii import hexlify
-from lbry.schema.claim import Claim
+from scribe.schema.claim import Claim

 log = logging.getLogger(__name__)


@@ -10,7 +10,7 @@ from lbry.wallet import SQLiteMixin
 from lbry.conf import Config
 from lbry.wallet.dewies import dewies_to_lbc, lbc_to_dewies
 from lbry.wallet.transaction import Transaction, Output
-from lbry.schema.claim import Claim
+from scribe.schema.claim import Claim
 from lbry.dht.constants import DATA_EXPIRATION
 from lbry.blob.blob_info import BlobInfo


@@ -9,7 +9,7 @@ from lbry.error import InvalidStreamURLError
 from lbry.stream.managed_stream import ManagedStream
 from lbry.torrent.torrent_manager import TorrentSource
 from lbry.utils import cache_concurrent
-from lbry.schema.url import URL
+from scribe.schema.url import URL
 from lbry.wallet.dewies import dewies_to_lbc
 from lbry.file.source_manager import SourceManager
 from lbry.file.source import ManagedDownloadSource


@ -1,5 +0,0 @@
build:
rm types/v2/* -rf
touch types/v2/__init__.py
cd types/v2/ && protoc --python_out=. -I ../../../../../types/v2/proto/ ../../../../../types/v2/proto/*.proto
sed -e 's/^import\ \(.*\)_pb2\ /from . import\ \1_pb2\ /g' -i types/v2/*.py


@ -1,24 +0,0 @@
Schema
=====
Those files are generated from the [types repo](https://github.com/lbryio/types). If you are modifying/adding a new type, make sure it is cloned in the same root folder as the SDK repo, like:
```
repos/
- lbry-sdk/
- types/
```
Then, [download protoc 3.2.0](https://github.com/protocolbuffers/protobuf/releases/tag/v3.2.0), add it to your PATH. On linux it is:
```bash
cd ~/.local/bin
wget https://github.com/protocolbuffers/protobuf/releases/download/v3.2.0/protoc-3.2.0-linux-x86_64.zip
unzip protoc-3.2.0-linux-x86_64.zip bin/protoc -d..
```
Finally, `make` should update everything in place.
### Why protoc 3.2.0?
Different/newer versions will generate larger diffs and we need to make sure they are good. In theory, we can just update to latest and it will all work, but it is a good practice to check blockchain data and retro compatibility before bumping versions (if you do, please update this section!).


@@ -1 +0,0 @@
from .claim import Claim


@@ -1,571 +0,0 @@
import json
import logging
import os.path
import hashlib
from typing import Tuple, List
from string import ascii_letters
from decimal import Decimal, ROUND_UP
from binascii import hexlify, unhexlify

from google.protobuf.json_format import MessageToDict

from lbry.crypto.base58 import Base58
from lbry.constants import COIN
from lbry.error import MissingPublishedFileError, EmptyPublishedFileError

from lbry.schema.mime_types import guess_media_type
from lbry.schema.base import Metadata, BaseMessageList
from lbry.schema.tags import clean_tags, normalize_tag
from lbry.schema.types.v2.claim_pb2 import (
    Fee as FeeMessage,
    Location as LocationMessage,
    Language as LanguageMessage
)

log = logging.getLogger(__name__)


def calculate_sha384_file_hash(file_path):
    sha384 = hashlib.sha384()
    with open(file_path, 'rb') as f:
        for chunk in iter(lambda: f.read(128 * sha384.block_size), b''):
            sha384.update(chunk)
    return sha384.digest()


def country_int_to_str(country: int) -> str:
    r = LocationMessage.Country.Name(country)
    return r[1:] if r.startswith('R') else r


def country_str_to_int(country: str) -> int:
    if len(country) == 3:
        country = 'R' + country
    return LocationMessage.Country.Value(country)


class Dimmensional(Metadata):

    __slots__ = ()

    @property
    def width(self) -> int:
        return self.message.width

    @width.setter
    def width(self, width: int):
        self.message.width = width

    @property
    def height(self) -> int:
        return self.message.height

    @height.setter
    def height(self, height: int):
        self.message.height = height

    @property
    def dimensions(self) -> Tuple[int, int]:
        return self.width, self.height

    @dimensions.setter
    def dimensions(self, dimensions: Tuple[int, int]):
        self.message.width, self.message.height = dimensions

    def _extract(self, file_metadata, field):
        try:
            setattr(self, field, file_metadata.getValues(field)[0])
        except:
            log.exception(f'Could not extract {field} from file metadata.')

    def update(self, file_metadata=None, height=None, width=None):
        if height is not None:
            self.height = height
        elif file_metadata:
            self._extract(file_metadata, 'height')

        if width is not None:
            self.width = width
        elif file_metadata:
            self._extract(file_metadata, 'width')


class Playable(Metadata):

    __slots__ = ()

    @property
    def duration(self) -> int:
        return self.message.duration

    @duration.setter
    def duration(self, duration: int):
        self.message.duration = duration

    def update(self, file_metadata=None, duration=None):
        if duration is not None:
            self.duration = duration
        elif file_metadata:
            try:
                self.duration = file_metadata.getValues('duration')[0].seconds
            except:
                log.exception('Could not extract duration from file metadata.')


class Image(Dimmensional):

    __slots__ = ()


class Audio(Playable):

    __slots__ = ()


class Video(Dimmensional, Playable):

    __slots__ = ()

    def update(self, file_metadata=None, height=None, width=None, duration=None):
        Dimmensional.update(self, file_metadata, height, width)
        Playable.update(self, file_metadata, duration)


class Source(Metadata):

    __slots__ = ()

    def update(self, file_path=None):
        if file_path is not None:
            self.name = os.path.basename(file_path)
            self.media_type, stream_type = guess_media_type(file_path)
            if not os.path.isfile(file_path):
                raise MissingPublishedFileError(file_path)
            self.size = os.path.getsize(file_path)
            if self.size == 0:
                raise EmptyPublishedFileError(file_path)
            self.file_hash_bytes = calculate_sha384_file_hash(file_path)
            return stream_type

    @property
    def name(self) -> str:
        return self.message.name

    @name.setter
    def name(self, name: str):
        self.message.name = name

    @property
    def size(self) -> int:
        return self.message.size

    @size.setter
    def size(self, size: int):
        self.message.size = size

    @property
    def media_type(self) -> str:
        return self.message.media_type

    @media_type.setter
    def media_type(self, media_type: str):
        self.message.media_type = media_type

    @property
    def file_hash(self) -> str:
        return hexlify(self.message.hash).decode()

    @file_hash.setter
    def file_hash(self, file_hash: str):
        self.message.hash = unhexlify(file_hash.encode())

    @property
    def file_hash_bytes(self) -> bytes:
        return self.message.hash

    @file_hash_bytes.setter
    def file_hash_bytes(self, file_hash_bytes: bytes):
        self.message.hash = file_hash_bytes

    @property
    def sd_hash(self) -> str:
        return hexlify(self.message.sd_hash).decode()

    @sd_hash.setter
    def sd_hash(self, sd_hash: str):
        self.message.sd_hash = unhexlify(sd_hash.encode())

    @property
    def sd_hash_bytes(self) -> bytes:
        return self.message.sd_hash

    @sd_hash_bytes.setter
    def sd_hash_bytes(self, sd_hash: bytes):
        self.message.sd_hash = sd_hash

    @property
    def bt_infohash(self) -> str:
        return hexlify(self.message.bt_infohash).decode()

    @bt_infohash.setter
    def bt_infohash(self, bt_infohash: str):
        self.message.bt_infohash = unhexlify(bt_infohash.encode())

    @property
    def bt_infohash_bytes(self) -> bytes:
        return self.message.bt_infohash.decode()

    @bt_infohash_bytes.setter
    def bt_infohash_bytes(self, bt_infohash: bytes):
        self.message.bt_infohash = bt_infohash

    @property
    def url(self) -> str:
        return self.message.url

    @url.setter
    def url(self, url: str):
        self.message.url = url


class Fee(Metadata):

    __slots__ = ()

    def update(self, address: str = None, currency: str = None, amount=None):
        if amount:
            currency = (currency or self.currency or '').lower()
            if not currency:
                raise Exception('In order to set a fee amount, please specify a fee currency.')
            if currency not in ('lbc', 'btc', 'usd'):
                raise Exception(f'Missing or unknown currency provided: {currency}')
            setattr(self, currency, Decimal(amount))
        elif currency:
            raise Exception('In order to set a fee currency, please specify a fee amount.')
        if address:
            if not self.currency:
                raise Exception('In order to set a fee address, please specify a fee amount and currency.')
            self.address = address

    @property
    def currency(self) -> str:
        if self.message.currency:
            return FeeMessage.Currency.Name(self.message.currency)

    @property
    def address(self) -> str:
        if self.address_bytes:
            return Base58.encode(self.address_bytes)

    @address.setter
    def address(self, address: str):
        self.address_bytes = Base58.decode(address)

    @property
    def address_bytes(self) -> bytes:
        return self.message.address

    @address_bytes.setter
    def address_bytes(self, address: bytes):
        self.message.address = address

    @property
    def amount(self) -> Decimal:
        if self.currency == 'LBC':
            return self.lbc
        if self.currency == 'BTC':
            return self.btc
        if self.currency == 'USD':
            return self.usd

    DEWIES = Decimal(COIN)

    @property
    def lbc(self) -> Decimal:
        if self.message.currency != FeeMessage.LBC:
            raise ValueError('LBC can only be returned for LBC fees.')
        return Decimal(self.message.amount / self.DEWIES)

    @lbc.setter
    def lbc(self, amount: Decimal):
        self.dewies = int(amount * self.DEWIES)

    @property
    def dewies(self) -> int:
        if self.message.currency != FeeMessage.LBC:
            raise ValueError('Dewies can only be returned for LBC fees.')
        return self.message.amount

    @dewies.setter
    def dewies(self, amount: int):
        self.message.amount = amount
        self.message.currency = FeeMessage.LBC

    SATOSHIES = Decimal(COIN)

    @property
    def btc(self) -> Decimal:
        if self.message.currency != FeeMessage.BTC:
            raise ValueError('BTC can only be returned for BTC fees.')
        return Decimal(self.message.amount / self.SATOSHIES)

    @btc.setter
    def btc(self, amount: Decimal):
        self.satoshis = int(amount * self.SATOSHIES)

    @property
    def satoshis(self) -> int:
        if self.message.currency != FeeMessage.BTC:
            raise ValueError('Satoshies can only be returned for BTC fees.')
        return self.message.amount

    @satoshis.setter
    def satoshis(self, amount: int):
        self.message.amount = amount
        self.message.currency = FeeMessage.BTC

    PENNIES = Decimal('100.0')
    PENNY = Decimal('0.01')

    @property
    def usd(self) -> Decimal:
        if self.message.currency != FeeMessage.USD:
            raise ValueError('USD can only be returned for USD fees.')
        return Decimal(self.message.amount / self.PENNIES)

    @usd.setter
    def usd(self, amount: Decimal):
        self.pennies = int(amount.quantize(self.PENNY, ROUND_UP) * self.PENNIES)

    @property
    def pennies(self) -> int:
        if self.message.currency != FeeMessage.USD:
            raise ValueError('Pennies can only be returned for USD fees.')
        return self.message.amount

    @pennies.setter
    def pennies(self, amount: int):
        self.message.amount = amount
        self.message.currency = FeeMessage.USD


class ClaimReference(Metadata):

    __slots__ = ()

    @property
    def claim_id(self) -> str:
        return hexlify(self.claim_hash[::-1]).decode()

    @claim_id.setter
    def claim_id(self, claim_id: str):
        self.claim_hash = unhexlify(claim_id)[::-1]

    @property
    def claim_hash(self) -> bytes:
        return self.message.claim_hash

    @claim_hash.setter
    def claim_hash(self, claim_hash: bytes):
        self.message.claim_hash = claim_hash


class ClaimList(BaseMessageList[ClaimReference]):

    __slots__ = ()
    item_class = ClaimReference

    @property
    def _message(self):
        return self.message.claim_references

    def append(self, value):
        self.add().claim_id = value

    @property
    def ids(self) -> List[str]:
        return [c.claim_id for c in self]


class Language(Metadata):

    __slots__ = ()

    @property
    def langtag(self) -> str:
        langtag = []
        if self.language:
            langtag.append(self.language)
        if self.script:
            langtag.append(self.script)
        if self.region:
            langtag.append(self.region)
        return '-'.join(langtag)

    @langtag.setter
    def langtag(self, langtag: str):
        parts = langtag.split('-')
        self.language = parts.pop(0)
        if parts and len(parts[0]) == 4:
            self.script = parts.pop(0)
        if parts and len(parts[0]) == 2 and parts[0].isalpha():
            self.region = parts.pop(0)
        if parts and len(parts[0]) == 3 and parts[0].isdigit():
            self.region = parts.pop(0)
        assert not parts, f"Failed to parse language tag: {langtag}"

    @property
    def language(self) -> str:
        if self.message.language:
            return LanguageMessage.Language.Name(self.message.language)

    @language.setter
    def language(self, language: str):
        self.message.language = LanguageMessage.Language.Value(language)

    @property
    def script(self) -> str:
        if self.message.script:
            return LanguageMessage.Script.Name(self.message.script)

    @script.setter
    def script(self, script: str):
        self.message.script = LanguageMessage.Script.Value(script)

    @property
    def region(self) -> str:
        if self.message.region:
            return country_int_to_str(self.message.region)

    @region.setter
    def region(self, region: str):
        self.message.region = country_str_to_int(region)


class LanguageList(BaseMessageList[Language]):

    __slots__ = ()
    item_class = Language

    def append(self, value: str):
        self.add().langtag = value


class Location(Metadata):

    __slots__ = ()

    def from_value(self, value):
        if isinstance(value, str) and value.startswith('{'):
            value = json.loads(value)

        if isinstance(value, dict):
            for key, val in value.items():
                setattr(self, key, val)

        elif isinstance(value, str):
            parts = value.split(':')
            if len(parts) > 2 or (parts[0] and parts[0][0] in ascii_letters):
                country = parts and parts.pop(0)
                if country:
                    self.country = country
                state = parts and parts.pop(0)
                if state:
                    self.state = state
                city = parts and parts.pop(0)
                if city:
                    self.city = city
                code = parts and parts.pop(0)
                if code:
                    self.code = code
            latitude = parts and parts.pop(0)
            if latitude:
                self.latitude = latitude
            longitude = parts and parts.pop(0)
            if longitude:
                self.longitude = longitude

        else:
            raise ValueError(f'Could not parse country value: {value}')

    def to_dict(self):
        d = MessageToDict(self.message)
        if self.message.longitude:
            d['longitude'] = self.longitude
        if self.message.latitude:
            d['latitude'] = self.latitude
        return d

    @property
    def country(self) -> str:
        if self.message.country:
            return LocationMessage.Country.Name(self.message.country)

    @country.setter
    def country(self, country: str):
        self.message.country = LocationMessage.Country.Value(country)

    @property
    def state(self) -> str:
        return self.message.state

    @state.setter
    def state(self, state: str):
        self.message.state = state

    @property
    def city(self) -> str:
        return self.message.city

    @city.setter
    def city(self, city: str):
        self.message.city = city

    @property
    def code(self) -> str:
        return self.message.code

    @code.setter
    def code(self, code: str):
        self.message.code = code

    GPS_PRECISION = Decimal('10000000')

    @property
    def latitude(self) -> str:
        if self.message.latitude:
            return str(Decimal(self.message.latitude) / self.GPS_PRECISION)

    @latitude.setter
    def latitude(self, latitude: str):
        latitude = Decimal(latitude)
        assert -90 <= latitude <= 90, "Latitude must be between -90 and 90 degrees."
        self.message.latitude = int(latitude * self.GPS_PRECISION)

    @property
    def longitude(self) -> str:
        if self.message.longitude:
            return str(Decimal(self.message.longitude) / self.GPS_PRECISION)

    @longitude.setter
    def longitude(self, longitude: str):
        longitude = Decimal(longitude)
        assert -180 <= longitude <= 180, "Longitude must be between -180 and 180 degrees."
        self.message.longitude = int(longitude * self.GPS_PRECISION)


class LocationList(BaseMessageList[Location]):

    __slots__ = ()
    item_class = Location

    def append(self, value):
        self.add().from_value(value)


class TagList(BaseMessageList[str]):

    __slots__ = ()
    item_class = str

    def append(self, tag: str):
        tag = normalize_tag(tag)
        if tag and tag not in self.message:
            self.message.append(tag)


@@ -1,124 +0,0 @@
from binascii import hexlify, unhexlify
from typing import List, Iterator, TypeVar, Generic

from google.protobuf.message import DecodeError
from google.protobuf.json_format import MessageToDict


class Signable:

    __slots__ = (
        'message', 'version', 'signature',
        'signature_type', 'unsigned_payload', 'signing_channel_hash'
    )

    message_class = None

    def __init__(self, message=None):
        self.message = message or self.message_class()
        self.version = 2
        self.signature = None
        self.signature_type = 'SECP256k1'
        self.unsigned_payload = None
        self.signing_channel_hash = None

    def clear_signature(self):
        self.signature = None
        self.unsigned_payload = None
        self.signing_channel_hash = None

    @property
    def signing_channel_id(self):
        return hexlify(self.signing_channel_hash[::-1]).decode() if self.signing_channel_hash else None

    @signing_channel_id.setter
    def signing_channel_id(self, channel_id: str):
        self.signing_channel_hash = unhexlify(channel_id)[::-1]

    @property
    def is_signed(self):
        return self.signature is not None

    def to_dict(self):
        return MessageToDict(self.message)

    def to_message_bytes(self) -> bytes:
        return self.message.SerializeToString()

    def to_bytes(self) -> bytes:
        pieces = bytearray()
        if self.is_signed:
            pieces.append(1)
            pieces.extend(self.signing_channel_hash)
            pieces.extend(self.signature)
        else:
            pieces.append(0)
        pieces.extend(self.to_message_bytes())
        return bytes(pieces)

    @classmethod
    def from_bytes(cls, data: bytes):
        signable = cls()
        if data[0] == 0:
            signable.message.ParseFromString(data[1:])
        elif data[0] == 1:
            signable.signing_channel_hash = data[1:21]
            signable.signature = data[21:85]
            signable.message.ParseFromString(data[85:])
        else:
            raise DecodeError('Could not determine message format version.')
        return signable

    def __len__(self):
        return len(self.to_bytes())

    def __bytes__(self):
        return self.to_bytes()


class Metadata:

    __slots__ = 'message',

    def __init__(self, message):
        self.message = message


I = TypeVar('I')


class BaseMessageList(Metadata, Generic[I]):

    __slots__ = ()

    item_class = None

    @property
    def _message(self):
        return self.message

    def add(self) -> I:
        return self.item_class(self._message.add())

    def extend(self, values: List[str]):
        for value in values:
            self.append(value)

    def append(self, value: str):
        raise NotImplemented

    def __len__(self):
        return len(self._message)

    def __iter__(self) -> Iterator[I]:
        for item in self._message:
            yield self.item_class(item)

    def __getitem__(self, item) -> I:
        return self.item_class(self._message[item])

    def __delitem__(self, key):
        del self._message[key]

    def __eq__(self, other) -> bool:
        return self._message == other

@@ -1,422 +0,0 @@
import logging
from typing import List
from binascii import hexlify, unhexlify

from asn1crypto.keys import PublicKeyInfo
from coincurve import PublicKey as cPublicKey
from google.protobuf.json_format import MessageToDict
from google.protobuf.message import DecodeError
from hachoir.core.log import log as hachoir_log
from hachoir.parser import createParser as binary_file_parser
from hachoir.metadata import extractMetadata as binary_file_metadata

from lbry.schema import compat
from lbry.schema.base import Signable
from lbry.schema.mime_types import guess_media_type, guess_stream_type
from lbry.schema.attrs import (
    Source, Playable, Dimmensional, Fee, Image, Video, Audio,
    LanguageList, LocationList, ClaimList, ClaimReference, TagList
)
from lbry.schema.types.v2.claim_pb2 import Claim as ClaimMessage
from lbry.error import InputValueIsNoneError

hachoir_log.use_print = False
log = logging.getLogger(__name__)


class Claim(Signable):

    STREAM = 'stream'
    CHANNEL = 'channel'
    COLLECTION = 'collection'
    REPOST = 'repost'

    __slots__ = ()

    message_class = ClaimMessage

    @property
    def claim_type(self) -> str:
        return self.message.WhichOneof('type')

    def get_message(self, type_name):
        message = getattr(self.message, type_name)
        if self.claim_type is None:
            message.SetInParent()
        if self.claim_type != type_name:
            raise ValueError(f'Claim is not a {type_name}.')
        return message

    @property
    def is_stream(self):
        return self.claim_type == self.STREAM

    @property
    def stream(self) -> 'Stream':
        return Stream(self)

    @property
    def is_channel(self):
        return self.claim_type == self.CHANNEL

    @property
    def channel(self) -> 'Channel':
        return Channel(self)

    @property
    def is_repost(self):
        return self.claim_type == self.REPOST

    @property
    def repost(self) -> 'Repost':
        return Repost(self)

    @property
    def is_collection(self):
        return self.claim_type == self.COLLECTION

    @property
    def collection(self) -> 'Collection':
        return Collection(self)

    @classmethod
    def from_bytes(cls, data: bytes) -> 'Claim':
        try:
            return super().from_bytes(data)
        except DecodeError:
            claim = cls()
            if data[0] == ord('{'):
                claim.version = 0
                compat.from_old_json_schema(claim, data)
            elif data[0] not in (0, 1):
                claim.version = 1
                compat.from_types_v1(claim, data)
            else:
                raise
            return claim


class BaseClaim:

    __slots__ = 'claim', 'message'

    claim_type = None
    object_fields = 'thumbnail',
    repeat_fields = 'tags', 'languages', 'locations'

    def __init__(self, claim: Claim = None):
        self.claim = claim or Claim()
        self.message = self.claim.get_message(self.claim_type)

    def to_dict(self):
        claim = MessageToDict(self.claim.message, preserving_proto_field_name=True)
        claim.update(claim.pop(self.claim_type))
        if 'languages' in claim:
            claim['languages'] = self.langtags
        if 'locations' in claim:
            claim['locations'] = [l.to_dict() for l in self.locations]
        return claim

    def none_check(self, kwargs):
        for key, value in kwargs.items():
            if value is None:
                raise InputValueIsNoneError(key)

    def update(self, **kwargs):
        self.none_check(kwargs)

        for key in list(kwargs):
            for field in self.object_fields:
                if key.startswith(f'{field}_'):
                    attr = getattr(self, field)
                    setattr(attr, key[len(f'{field}_'):], kwargs.pop(key))
                    continue

        for l in self.repeat_fields:
            field = getattr(self, l)
            if kwargs.pop(f'clear_{l}', False):
                del field[:]
            items = kwargs.pop(l, None)
            if items is not None:
                if isinstance(items, str):
                    field.append(items)
                elif isinstance(items, list):
                    field.extend(items)
                else:
                    raise ValueError(f"Unknown {l} value: {items}")

        for key, value in kwargs.items():
            setattr(self, key, value)

    @property
    def title(self) -> str:
        return self.claim.message.title

    @title.setter
    def title(self, title: str):
        self.claim.message.title = title

    @property
    def description(self) -> str:
        return self.claim.message.description

    @description.setter
    def description(self, description: str):
        self.claim.message.description = description

    @property
    def thumbnail(self) -> Source:
        return Source(self.claim.message.thumbnail)

    @property
    def tags(self) -> List[str]:
        return TagList(self.claim.message.tags)

    @property
    def languages(self) -> LanguageList:
        return LanguageList(self.claim.message.languages)

    @property
    def langtags(self) -> List[str]:
        return [l.langtag for l in self.languages]

    @property
    def locations(self) -> LocationList:
        return LocationList(self.claim.message.locations)


class Stream(BaseClaim):

    __slots__ = ()

    claim_type = Claim.STREAM

    object_fields = BaseClaim.object_fields + ('source',)

    def to_dict(self):
        claim = super().to_dict()
        if 'source' in claim:
            if 'hash' in claim['source']:
                claim['source']['hash'] = self.source.file_hash
            if 'sd_hash' in claim['source']:
                claim['source']['sd_hash'] = self.source.sd_hash
            elif 'bt_infohash' in claim['source']:
                claim['source']['bt_infohash'] = self.source.bt_infohash
            if 'media_type' in claim['source']:
                claim['stream_type'] = guess_stream_type(claim['source']['media_type'])
        fee = claim.get('fee', {})
        if 'address' in fee:
            fee['address'] = self.fee.address
        if 'amount' in fee:
            fee['amount'] = str(self.fee.amount)
        return claim

    def update(self, file_path=None, height=None, width=None, duration=None, **kwargs):

        if kwargs.pop('clear_fee', False):
            self.message.ClearField('fee')
        else:
            self.fee.update(
                kwargs.pop('fee_address', None),
                kwargs.pop('fee_currency', None),
                kwargs.pop('fee_amount', None)
            )

        self.none_check(kwargs)

        if 'sd_hash' in kwargs:
            self.source.sd_hash = kwargs.pop('sd_hash')
        elif 'bt_infohash' in kwargs:
            self.source.bt_infohash = kwargs.pop('bt_infohash')
        if 'file_name' in kwargs:
            self.source.name = kwargs.pop('file_name')
        if 'file_hash' in kwargs:
            self.source.file_hash = kwargs.pop('file_hash')

        stream_type = None
        if file_path is not None:
            stream_type = self.source.update(file_path=file_path)
        elif self.source.name:
            self.source.media_type, stream_type = guess_media_type(self.source.name)
        elif self.source.media_type:
            stream_type = guess_stream_type(self.source.media_type)

        if 'file_size' in kwargs:
            self.source.size = kwargs.pop('file_size')

        if self.stream_type is not None and self.stream_type != stream_type:
            self.message.ClearField(self.stream_type)

        if stream_type in ('image', 'video', 'audio'):
            media = getattr(self, stream_type)
            media_args = {'file_metadata': None}
            if file_path is not None and not all((duration, width, height)):
                try:
                    media_args['file_metadata'] = binary_file_metadata(binary_file_parser(file_path))
                except:
                    log.exception('Could not read file metadata.')
            if isinstance(media, Playable):
                media_args['duration'] = duration
            if isinstance(media, Dimmensional):
                media_args['height'] = height
                media_args['width'] = width
            media.update(**media_args)

        super().update(**kwargs)

    @property
    def author(self) -> str:
        return self.message.author

    @author.setter
    def author(self, author: str):
        self.message.author = author

    @property
    def license(self) -> str:
        return self.message.license

    @license.setter
    def license(self, license: str):
        self.message.license = license

    @property
    def license_url(self) -> str:
        return self.message.license_url

    @license_url.setter
    def license_url(self, license_url: str):
        self.message.license_url = license_url

    @property
    def release_time(self) -> int:
        return self.message.release_time

    @release_time.setter
    def release_time(self, release_time: int):
        self.message.release_time = release_time

    @property
    def fee(self) -> Fee:
        return Fee(self.message.fee)

    @property
    def has_fee(self) -> bool:
        return self.message.HasField('fee')

    @property
    def has_source(self) -> bool:
        return self.message.HasField('source')

    @property
    def source(self) -> Source:
        return Source(self.message.source)

    @property
    def stream_type(self) -> str:
        return self.message.WhichOneof('type')

    @property
    def image(self) -> Image:
        return Image(self.message.image)

    @property
    def video(self) -> Video:
        return Video(self.message.video)

    @property
    def audio(self) -> Audio:
        return Audio(self.message.audio)


class Channel(BaseClaim):

    __slots__ = ()

    claim_type = Claim.CHANNEL

    object_fields = BaseClaim.object_fields + ('cover',)
    repeat_fields = BaseClaim.repeat_fields + ('featured',)

    def to_dict(self):
        claim = super().to_dict()
        claim['public_key'] = self.public_key
        if 'featured' in claim:
            claim['featured'] = self.featured.ids
        return claim

    @property
    def public_key(self) -> str:
        return hexlify(self.public_key_bytes).decode()

    @public_key.setter
    def public_key(self, sd_public_key: str):
        self.message.public_key = unhexlify(sd_public_key.encode())

    @property
    def public_key_bytes(self) -> bytes:
        if len(self.message.public_key) == 33:
            return self.message.public_key
        public_key_info = PublicKeyInfo.load(self.message.public_key)
        public_key = cPublicKey(public_key_info.native['public_key'])
        return public_key.format(compressed=True)

    @public_key_bytes.setter
    def public_key_bytes(self, public_key: bytes):
        self.message.public_key = public_key

    @property
    def email(self) -> str:
        return self.message.email

    @email.setter
    def email(self, email: str):
        self.message.email = email

    @property
    def website_url(self) -> str:
        return self.message.website_url

    @website_url.setter
    def website_url(self, website_url: str):
        self.message.website_url = website_url

    @property
    def cover(self) -> Source:
        return Source(self.message.cover)

    @property
    def featured(self) -> ClaimList:
        return ClaimList(self.message.featured)


class Repost(BaseClaim):

    __slots__ = ()

    claim_type = Claim.REPOST

    @property
    def reference(self) -> ClaimReference:
        return ClaimReference(self.message)


class Collection(BaseClaim):

    __slots__ = ()

    claim_type = Claim.COLLECTION

    repeat_fields = BaseClaim.repeat_fields + ('claims',)

    def to_dict(self):
        claim = super().to_dict()
        if claim.pop('claim_references', None):
            claim['claims'] = self.claims.ids
        return claim

    @property
    def claims(self) -> ClaimList:
        return ClaimList(self.message)


@@ -1,93 +0,0 @@
import json
from decimal import Decimal

from google.protobuf.message import DecodeError

from lbry.schema.types.v1.legacy_claim_pb2 import Claim as OldClaimMessage
from lbry.schema.types.v1.certificate_pb2 import KeyType
from lbry.schema.types.v1.fee_pb2 import Fee as FeeMessage


def from_old_json_schema(claim, payload: bytes):
    try:
        value = json.loads(payload)
    except:
        raise DecodeError('Could not parse JSON.')

    stream = claim.stream
    stream.source.sd_hash = value['sources']['lbry_sd_hash']
    stream.source.media_type = (
        value.get('content_type', value.get('content-type')) or
        'application/octet-stream'
    )
    stream.title = value.get('title', '')
    stream.description = value.get('description', '')
    if value.get('thumbnail', ''):
        stream.thumbnail.url = value.get('thumbnail', '')
    stream.author = value.get('author', '')
    stream.license = value.get('license', '')
    stream.license_url = value.get('license_url', '')
    language = value.get('language', '')
    if language:
        if language.lower() == 'english':
            language = 'en'
        try:
            stream.languages.append(language)
        except:
            pass

    if value.get('nsfw', False):
        stream.tags.append('mature')

    if "fee" in value and isinstance(value['fee'], dict):
        fee = value["fee"]
        currency = list(fee.keys())[0]
        if currency == 'LBC':
            stream.fee.lbc = Decimal(fee[currency]['amount'])
        elif currency == 'USD':
            stream.fee.usd = Decimal(fee[currency]['amount'])
        elif currency == 'BTC':
            stream.fee.btc = Decimal(fee[currency]['amount'])
        else:
            raise DecodeError(f'Unknown currency: {currency}')
        stream.fee.address = fee[currency]['address']

    return claim


def from_types_v1(claim, payload: bytes):
    old = OldClaimMessage()
    old.ParseFromString(payload)

    if old.claimType == 2:
        channel = claim.channel
        channel.public_key_bytes = old.certificate.publicKey
    else:
        stream = claim.stream
        stream.title = old.stream.metadata.title
        stream.description = old.stream.metadata.description
        stream.author = old.stream.metadata.author
        stream.license = old.stream.metadata.license
        stream.license_url = old.stream.metadata.licenseUrl
        stream.thumbnail.url = old.stream.metadata.thumbnail
        if old.stream.metadata.HasField('language'):
            stream.languages.add().message.language = old.stream.metadata.language
        stream.source.media_type = old.stream.source.contentType
        stream.source.sd_hash_bytes = old.stream.source.source
        if old.stream.metadata.nsfw:
            stream.tags.append('mature')
        if old.stream.metadata.HasField('fee'):
            fee = old.stream.metadata.fee
            stream.fee.address_bytes = fee.address
            currency = FeeMessage.Currency.Name(fee.currency)
            if currency == 'LBC':
                stream.fee.lbc = Decimal(fee.amount)
            elif currency == 'USD':
                stream.fee.usd = Decimal(fee.amount)
            elif currency == 'BTC':
                stream.fee.btc = Decimal(fee.amount)
            else:
                raise DecodeError(f'Unsupported currency: {currency}')

    if old.HasField('publisherSignature'):
        sig = old.publisherSignature
        claim.signature = sig.signature
        claim.signature_type = KeyType.Name(sig.signatureType)
        claim.signing_channel_hash = sig.certificateId[::-1]
        old.ClearField("publisherSignature")
        claim.unsigned_payload = old.SerializeToString()

    return claim

View file

@@ -1,214 +0,0 @@
import os
import filetype
import logging

types_map = {
    # http://www.iana.org/assignments/media-types
    # Type mapping for automated metadata extraction (video, audio, image, document, binary, model)
    '.a': ('application/octet-stream', 'binary'),
    '.ai': ('application/postscript', 'image'),
    '.aif': ('audio/x-aiff', 'audio'),
    '.aifc': ('audio/x-aiff', 'audio'),
    '.aiff': ('audio/x-aiff', 'audio'),
    '.au': ('audio/basic', 'audio'),
    '.avi': ('video/x-msvideo', 'video'),
    '.bat': ('text/plain', 'document'),
    '.bcpio': ('application/x-bcpio', 'binary'),
    '.bin': ('application/octet-stream', 'binary'),
    '.bmp': ('image/bmp', 'image'),
    '.c': ('text/plain', 'document'),
    '.cdf': ('application/x-netcdf', 'binary'),
    '.cpio': ('application/x-cpio', 'binary'),
    '.csh': ('application/x-csh', 'binary'),
    '.css': ('text/css', 'document'),
    '.csv': ('text/csv', 'document'),
    '.dll': ('application/octet-stream', 'binary'),
    '.doc': ('application/msword', 'document'),
    '.dot': ('application/msword', 'document'),
    '.dvi': ('application/x-dvi', 'binary'),
    '.eml': ('message/rfc822', 'document'),
    '.eps': ('application/postscript', 'document'),
    '.epub': ('application/epub+zip', 'document'),
    '.etx': ('text/x-setext', 'document'),
    '.exe': ('application/octet-stream', 'binary'),
    '.gif': ('image/gif', 'image'),
    '.gtar': ('application/x-gtar', 'binary'),
    '.h': ('text/plain', 'document'),
    '.hdf': ('application/x-hdf', 'binary'),
    '.htm': ('text/html', 'document'),
    '.html': ('text/html', 'document'),
    '.ico': ('image/vnd.microsoft.icon', 'image'),
    '.ief': ('image/ief', 'image'),
    '.iges': ('model/iges', 'model'),
    '.jpe': ('image/jpeg', 'image'),
    '.jpeg': ('image/jpeg', 'image'),
    '.jpg': ('image/jpeg', 'image'),
    '.js': ('application/javascript', 'document'),
    '.json': ('application/json', 'document'),
    '.ksh': ('text/plain', 'document'),
    '.latex': ('application/x-latex', 'binary'),
    '.m1v': ('video/mpeg', 'video'),
    '.m3u': ('application/x-mpegurl', 'audio'),
    '.m3u8': ('application/x-mpegurl', 'video'),
    '.man': ('application/x-troff-man', 'document'),
    '.markdown': ('text/markdown', 'document'),
    '.md': ('text/markdown', 'document'),
    '.me': ('application/x-troff-me', 'binary'),
    '.mht': ('message/rfc822', 'document'),
    '.mhtml': ('message/rfc822', 'document'),
    '.mif': ('application/x-mif', 'binary'),
    '.mov': ('video/quicktime', 'video'),
    '.movie': ('video/x-sgi-movie', 'video'),
    '.mp2': ('audio/mpeg', 'audio'),
    '.mp3': ('audio/mpeg', 'audio'),
    '.mp4': ('video/mp4', 'video'),
    '.mpa': ('video/mpeg', 'video'),
    '.mpd': ('application/dash+xml', 'video'),
    '.mpe': ('video/mpeg', 'video'),
    '.mpeg': ('video/mpeg', 'video'),
    '.mpg': ('video/mpeg', 'video'),
    '.ms': ('application/x-troff-ms', 'binary'),
    '.m4s': ('video/iso.segment', 'binary'),
    '.nc': ('application/x-netcdf', 'binary'),
    '.nws': ('message/rfc822', 'document'),
    '.o': ('application/octet-stream', 'binary'),
    '.obj': ('application/octet-stream', 'model'),
    '.oda': ('application/oda', 'binary'),
    '.p12': ('application/x-pkcs12', 'binary'),
    '.p7c': ('application/pkcs7-mime', 'binary'),
    '.pbm': ('image/x-portable-bitmap', 'image'),
    '.pdf': ('application/pdf', 'document'),
    '.pfx': ('application/x-pkcs12', 'binary'),
    '.pgm': ('image/x-portable-graymap', 'image'),
    '.pl': ('text/plain', 'document'),
    '.png': ('image/png', 'image'),
    '.pnm': ('image/x-portable-anymap', 'image'),
    '.pot': ('application/vnd.ms-powerpoint', 'document'),
    '.ppa': ('application/vnd.ms-powerpoint', 'document'),
    '.ppm': ('image/x-portable-pixmap', 'image'),
    '.pps': ('application/vnd.ms-powerpoint', 'document'),
    '.ppt': ('application/vnd.ms-powerpoint', 'document'),
    '.ps': ('application/postscript', 'document'),
    '.pwz': ('application/vnd.ms-powerpoint', 'document'),
    '.py': ('text/x-python', 'document'),
    '.pyc': ('application/x-python-code', 'binary'),
    '.pyo': ('application/x-python-code', 'binary'),
    '.qt': ('video/quicktime', 'video'),
    '.ra': ('audio/x-pn-realaudio', 'audio'),
    '.ram': ('application/x-pn-realaudio', 'audio'),
    '.ras': ('image/x-cmu-raster', 'image'),
    '.rdf': ('application/xml', 'binary'),
    '.rgb': ('image/x-rgb', 'image'),
    '.roff': ('application/x-troff', 'binary'),
    '.rtx': ('text/richtext', 'document'),
    '.sgm': ('text/x-sgml', 'document'),
    '.sgml': ('text/x-sgml', 'document'),
    '.sh': ('application/x-sh', 'document'),
    '.shar': ('application/x-shar', 'binary'),
    '.snd': ('audio/basic', 'audio'),
    '.so': ('application/octet-stream', 'binary'),
    '.src': ('application/x-wais-source', 'binary'),
    '.stl': ('model/stl', 'model'),
    '.sv4cpio': ('application/x-sv4cpio', 'binary'),
    '.sv4crc': ('application/x-sv4crc', 'binary'),
    '.svg': ('image/svg+xml', 'image'),
    '.swf': ('application/x-shockwave-flash', 'binary'),
    '.t': ('application/x-troff', 'binary'),
    '.tar': ('application/x-tar', 'binary'),
    '.tcl': ('application/x-tcl', 'binary'),
    '.tex': ('application/x-tex', 'binary'),
    '.texi': ('application/x-texinfo', 'binary'),
    '.texinfo': ('application/x-texinfo', 'binary'),
    '.tif': ('image/tiff', 'image'),
    '.tiff': ('image/tiff', 'image'),
    '.tr': ('application/x-troff', 'binary'),
    '.ts': ('video/mp2t', 'video'),
    '.tsv': ('text/tab-separated-values', 'document'),
    '.txt': ('text/plain', 'document'),
    '.ustar': ('application/x-ustar', 'binary'),
    '.vcf': ('text/x-vcard', 'document'),
    '.vtt': ('text/vtt', 'document'),
    '.wav': ('audio/x-wav', 'audio'),
    '.webm': ('video/webm', 'video'),
    '.wiz': ('application/msword', 'document'),
    '.wsdl': ('application/xml', 'document'),
    '.xbm': ('image/x-xbitmap', 'image'),
    '.xlb': ('application/vnd.ms-excel', 'document'),
    '.xls': ('application/vnd.ms-excel', 'document'),
    '.xml': ('text/xml', 'document'),
    '.xpdl': ('application/xml', 'document'),
    '.xpm': ('image/x-xpixmap', 'image'),
    '.xsl': ('application/xml', 'document'),
    '.xwd': ('image/x-xwindowdump', 'image'),
    '.zip': ('application/zip', 'binary'),

    # These are non-standard types, commonly found in the wild.
    '.cbr': ('application/vnd.comicbook-rar', 'document'),
    '.cbz': ('application/vnd.comicbook+zip', 'document'),
    '.flac': ('audio/flac', 'audio'),
    '.lbry': ('application/x-ext-lbry', 'document'),
    '.m4a': ('audio/mp4', 'audio'),
    '.m4v': ('video/m4v', 'video'),
    '.mid': ('audio/midi', 'audio'),
    '.midi': ('audio/midi', 'audio'),
    '.mkv': ('video/x-matroska', 'video'),
    '.mobi': ('application/x-mobipocket-ebook', 'document'),
    '.oga': ('audio/ogg', 'audio'),
    '.ogv': ('video/ogg', 'video'),
    '.ogg': ('video/ogg', 'video'),
    '.pct': ('image/pict', 'image'),
    '.pic': ('image/pict', 'image'),
    '.pict': ('image/pict', 'image'),
    '.prc': ('application/x-mobipocket-ebook', 'document'),
    '.rtf': ('application/rtf', 'document'),
    '.xul': ('text/xul', 'document'),

    # microsoft is special and has its own 'standard'
    # https://docs.microsoft.com/en-us/windows/desktop/wmp/file-name-extensions
    '.wmv': ('video/x-ms-wmv', 'video')
}

# maps detected extensions to the possible analogs
# i.e. .cbz file is actually a .zip
synonyms_map = {
    '.zip': ['.cbz'],
    '.rar': ['.cbr'],
    '.ar': ['.a']
}

log = logging.getLogger(__name__)


def guess_media_type(path):
    _, ext = os.path.splitext(path)
    extension = ext.strip().lower()

    try:
        kind = filetype.guess(path)
        if kind:
            real_extension = f".{kind.extension}"

            if extension != real_extension:
                if extension:
                    log.warning(f"file extension does not match it's contents: {path}, identified as {real_extension}")
                else:
                    log.debug(f"file {path} does not have extension, identified by it's contents as {real_extension}")

                if extension not in synonyms_map.get(real_extension, []):
                    extension = real_extension

    except OSError as error:
        pass

    if extension[1:]:
        if extension in types_map:
            return types_map[extension]
        return f'application/x-ext-{extension[1:]}', 'binary'
    return 'application/octet-stream', 'binary'


def guess_stream_type(media_type):
    for media, stream in types_map.values():
        if media == media_type:
            return stream
    return 'binary'


@@ -1,47 +0,0 @@
from google.protobuf.message import DecodeError
from google.protobuf.json_format import MessageToDict

from lbry.schema.types.v2.purchase_pb2 import Purchase as PurchaseMessage

from .attrs import ClaimReference


class Purchase(ClaimReference):

    START_BYTE = ord('P')

    __slots__ = ()

    def __init__(self, claim_id=None):
        super().__init__(PurchaseMessage())
        if claim_id is not None:
            self.claim_id = claim_id

    def to_dict(self):
        return MessageToDict(self.message)

    def to_message_bytes(self) -> bytes:
        return self.message.SerializeToString()

    def to_bytes(self) -> bytes:
        pieces = bytearray()
        pieces.append(self.START_BYTE)
        pieces.extend(self.to_message_bytes())
        return bytes(pieces)

    @classmethod
    def has_start_byte(cls, data: bytes):
        return data and data[0] == cls.START_BYTE

    @classmethod
    def from_bytes(cls, data: bytes):
        purchase = cls()
        if purchase.has_start_byte(data):
            purchase.message.ParseFromString(data[1:])
        else:
            raise DecodeError('Message does not start with correct byte.')
        return purchase

    def __len__(self):
        return len(self.to_bytes())

    def __bytes__(self):
        return self.to_bytes()


@ -1,232 +0,0 @@
import base64
from typing import List, TYPE_CHECKING, Union, Optional
from binascii import hexlify
from itertools import chain
from lbry.error import ResolveCensoredError
from lbry.schema.types.v2.result_pb2 import Outputs as OutputsMessage
from lbry.schema.types.v2.result_pb2 import Error as ErrorMessage
if TYPE_CHECKING:
from lbry.wallet.server.leveldb import ResolveResult
INVALID = ErrorMessage.Code.Name(ErrorMessage.INVALID)
NOT_FOUND = ErrorMessage.Code.Name(ErrorMessage.NOT_FOUND)
BLOCKED = ErrorMessage.Code.Name(ErrorMessage.BLOCKED)
def set_reference(reference, claim_hash, rows):
if claim_hash:
for txo in rows:
if claim_hash == txo.claim_hash:
reference.tx_hash = txo.tx_hash
reference.nout = txo.position
reference.height = txo.height
return
class Censor:
NOT_CENSORED = 0
SEARCH = 1
RESOLVE = 2
__slots__ = 'censor_type', 'censored'
def __init__(self, censor_type):
self.censor_type = censor_type
self.censored = {}
def is_censored(self, row):
return (row.get('censor_type') or self.NOT_CENSORED) >= self.censor_type
def apply(self, rows):
return [row for row in rows if not self.censor(row)]
def censor(self, row) -> Optional[bytes]:
if self.is_censored(row):
censoring_channel_hash = bytes.fromhex(row['censoring_channel_id'])[::-1]
self.censored.setdefault(censoring_channel_hash, set())
self.censored[censoring_channel_hash].add(row['tx_hash'])
return censoring_channel_hash
return None
def to_message(self, outputs: OutputsMessage, extra_txo_rows: dict):
for censoring_channel_hash, count in self.censored.items():
blocked = outputs.blocked.add()
blocked.count = len(count)
set_reference(blocked.channel, censoring_channel_hash, extra_txo_rows)
outputs.blocked_total += len(count)
class Outputs:
__slots__ = 'txos', 'extra_txos', 'txs', 'offset', 'total', 'blocked', 'blocked_total'
def __init__(self, txos: List, extra_txos: List, txs: set,
offset: int, total: int, blocked: List, blocked_total: int):
self.txos = txos
self.txs = txs
self.extra_txos = extra_txos
self.offset = offset
self.total = total
self.blocked = blocked
self.blocked_total = blocked_total
def inflate(self, txs):
tx_map = {tx.hash: tx for tx in txs}
for txo_message in self.extra_txos:
self.message_to_txo(txo_message, tx_map)
txos = [self.message_to_txo(txo_message, tx_map) for txo_message in self.txos]
return txos, self.inflate_blocked(tx_map)
def inflate_blocked(self, tx_map):
return {
"total": self.blocked_total,
"channels": [{
'channel': self.message_to_txo(blocked.channel, tx_map),
'blocked': blocked.count
} for blocked in self.blocked]
}
def message_to_txo(self, txo_message, tx_map):
if txo_message.WhichOneof('meta') == 'error':
error = {
'error': {
'name': txo_message.error.Code.Name(txo_message.error.code),
'text': txo_message.error.text,
}
}
if error['error']['name'] == BLOCKED:
error['error']['censor'] = self.message_to_txo(
txo_message.error.blocked.channel, tx_map
)
return error
tx = tx_map.get(txo_message.tx_hash)
if not tx:
return
txo = tx.outputs[txo_message.nout]
if txo_message.WhichOneof('meta') == 'claim':
claim = txo_message.claim
txo.meta = {
'short_url': f'lbry://{claim.short_url}',
'canonical_url': f'lbry://{claim.canonical_url or claim.short_url}',
'reposted': claim.reposted,
'is_controlling': claim.is_controlling,
'take_over_height': claim.take_over_height,
'creation_height': claim.creation_height,
'activation_height': claim.activation_height,
'expiration_height': claim.expiration_height,
'effective_amount': claim.effective_amount,
'support_amount': claim.support_amount,
# 'trending_group': claim.trending_group,
# 'trending_mixed': claim.trending_mixed,
# 'trending_local': claim.trending_local,
# 'trending_global': claim.trending_global,
}
if claim.HasField('channel'):
txo.channel = tx_map[claim.channel.tx_hash].outputs[claim.channel.nout]
if claim.HasField('repost'):
txo.reposted_claim = tx_map[claim.repost.tx_hash].outputs[claim.repost.nout]
try:
if txo.claim.is_channel:
txo.meta['claims_in_channel'] = claim.claims_in_channel
            except Exception:
                pass  # claims_in_channel only applies when the claim metadata parses as a channel
return txo
@classmethod
def from_base64(cls, data: str) -> 'Outputs':
return cls.from_bytes(base64.b64decode(data))
@classmethod
def from_bytes(cls, data: bytes) -> 'Outputs':
outputs = OutputsMessage()
outputs.ParseFromString(data)
txs = set()
for txo_message in chain(outputs.txos, outputs.extra_txos):
if txo_message.WhichOneof('meta') == 'error':
continue
txs.add((hexlify(txo_message.tx_hash[::-1]).decode(), txo_message.height))
return cls(
outputs.txos, outputs.extra_txos, txs,
outputs.offset, outputs.total,
outputs.blocked, outputs.blocked_total
)
@classmethod
def from_grpc(cls, outputs: OutputsMessage) -> 'Outputs':
txs = set()
for txo_message in chain(outputs.txos, outputs.extra_txos):
if txo_message.WhichOneof('meta') == 'error':
continue
txs.add((hexlify(txo_message.tx_hash[::-1]).decode(), txo_message.height))
return cls(
outputs.txos, outputs.extra_txos, txs,
outputs.offset, outputs.total,
outputs.blocked, outputs.blocked_total
)
@classmethod
def to_base64(cls, txo_rows, extra_txo_rows, offset=0, total=None, blocked=None) -> str:
return base64.b64encode(cls.to_bytes(txo_rows, extra_txo_rows, offset, total, blocked)).decode()
@classmethod
def to_bytes(cls, txo_rows, extra_txo_rows, offset=0, total=None, blocked: Censor = None) -> bytes:
page = OutputsMessage()
page.offset = offset
if total is not None:
page.total = total
if blocked is not None:
blocked.to_message(page, extra_txo_rows)
for row in extra_txo_rows:
txo_message: 'OutputsMessage' = page.extra_txos.add()
if not isinstance(row, Exception):
if row.channel_hash:
set_reference(txo_message.claim.channel, row.channel_hash, extra_txo_rows)
if row.reposted_claim_hash:
set_reference(txo_message.claim.repost, row.reposted_claim_hash, extra_txo_rows)
cls.encode_txo(txo_message, row)
for row in txo_rows:
# cls.row_to_message(row, page.txos.add(), extra_txo_rows)
txo_message: 'OutputsMessage' = page.txos.add()
cls.encode_txo(txo_message, row)
if not isinstance(row, Exception):
if row.channel_hash:
set_reference(txo_message.claim.channel, row.channel_hash, extra_txo_rows)
if row.reposted_claim_hash:
set_reference(txo_message.claim.repost, row.reposted_claim_hash, extra_txo_rows)
elif isinstance(row, ResolveCensoredError):
set_reference(txo_message.error.blocked.channel, row.censor_id, extra_txo_rows)
return page.SerializeToString()
@classmethod
def encode_txo(cls, txo_message, resolve_result: Union['ResolveResult', Exception]):
if isinstance(resolve_result, Exception):
txo_message.error.text = resolve_result.args[0]
if isinstance(resolve_result, ValueError):
txo_message.error.code = ErrorMessage.INVALID
elif isinstance(resolve_result, LookupError):
txo_message.error.code = ErrorMessage.NOT_FOUND
elif isinstance(resolve_result, ResolveCensoredError):
txo_message.error.code = ErrorMessage.BLOCKED
return
txo_message.tx_hash = resolve_result.tx_hash
txo_message.nout = resolve_result.position
txo_message.height = resolve_result.height
txo_message.claim.short_url = resolve_result.short_url
txo_message.claim.reposted = resolve_result.reposted
txo_message.claim.is_controlling = resolve_result.is_controlling
txo_message.claim.creation_height = resolve_result.creation_height
txo_message.claim.activation_height = resolve_result.activation_height
txo_message.claim.expiration_height = resolve_result.expiration_height
txo_message.claim.effective_amount = resolve_result.effective_amount
txo_message.claim.support_amount = resolve_result.support_amount
if resolve_result.canonical_url is not None:
txo_message.claim.canonical_url = resolve_result.canonical_url
if resolve_result.last_takeover_height is not None:
txo_message.claim.take_over_height = resolve_result.last_takeover_height
if resolve_result.claims_in_channel is not None:
txo_message.claim.claims_in_channel = resolve_result.claims_in_channel
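
End to end, a server packs result rows with Outputs.to_base64(...) and a client unpacks them with Outputs.from_base64(...); a minimal sketch with an empty result page:

encoded = Outputs.to_base64([], [])  # no txos, no extra_txos
outputs = Outputs.from_base64(encoded)
assert outputs.offset == 0 and outputs.blocked_total == 0
assert len(outputs.txos) == 0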


@@ -1,23 +0,0 @@
from lbry.schema.base import Signable
from lbry.schema.types.v2.support_pb2 import Support as SupportMessage
class Support(Signable):
__slots__ = ()
message_class = SupportMessage
@property
def emoji(self) -> str:
return self.message.emoji
@emoji.setter
def emoji(self, emoji: str):
self.message.emoji = emoji
@property
def comment(self) -> str:
return self.message.comment
@comment.setter
def comment(self, comment: str):
self.message.comment = comment
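
A short sketch of the accessors above (assumes Signable's no-argument constructor creates an empty SupportMessage, as the message_class attribute suggests):

support = Support()
support.emoji = "🚀"
support.comment = "great stream"
assert (support.emoji, support.comment) == ("🚀", "great stream")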


@@ -1,13 +0,0 @@
from typing import List
import re
MULTI_SPACE_RE = re.compile(r"\s{2,}")
WEIRD_CHARS_RE = re.compile(r"[#!~]")
def normalize_tag(tag: str):
return MULTI_SPACE_RE.sub(' ', WEIRD_CHARS_RE.sub(' ', tag.lower().replace("'", ""))).strip()
def clean_tags(tags: List[str]):
return [tag for tag in {normalize_tag(tag) for tag in tags} if tag]
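
For reference, normalize_tag lower-cases, strips apostrophes, replaces the "weird" characters with spaces, and collapses runs of whitespace; clean_tags additionally de-duplicates and drops empty results:

assert normalize_tag("Sci-Fi  #Movies!") == "sci-fi movies"
assert sorted(clean_tags(["Rock", "rock", "#pop", "  "])) == ["pop", "rock"]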


@@ -1,146 +0,0 @@
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: certificate.proto
import sys
_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
from google.protobuf.internal import enum_type_wrapper
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
DESCRIPTOR = _descriptor.FileDescriptor(
name='certificate.proto',
package='legacy_pb',
syntax='proto2',
serialized_options=None,
serialized_pb=_b('\n\x11\x63\x65rtificate.proto\x12\tlegacy_pb\"\xa2\x01\n\x0b\x43\x65rtificate\x12/\n\x07version\x18\x01 \x02(\x0e\x32\x1e.legacy_pb.Certificate.Version\x12#\n\x07keyType\x18\x02 \x02(\x0e\x32\x12.legacy_pb.KeyType\x12\x11\n\tpublicKey\x18\x04 \x02(\x0c\"*\n\x07Version\x12\x13\n\x0fUNKNOWN_VERSION\x10\x00\x12\n\n\x06_0_0_1\x10\x01*Q\n\x07KeyType\x12\x1b\n\x17UNKNOWN_PUBLIC_KEY_TYPE\x10\x00\x12\x0c\n\x08NIST256p\x10\x01\x12\x0c\n\x08NIST384p\x10\x02\x12\r\n\tSECP256k1\x10\x03')
)
_KEYTYPE = _descriptor.EnumDescriptor(
name='KeyType',
full_name='legacy_pb.KeyType',
filename=None,
file=DESCRIPTOR,
values=[
_descriptor.EnumValueDescriptor(
name='UNKNOWN_PUBLIC_KEY_TYPE', index=0, number=0,
serialized_options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='NIST256p', index=1, number=1,
serialized_options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='NIST384p', index=2, number=2,
serialized_options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='SECP256k1', index=3, number=3,
serialized_options=None,
type=None),
],
containing_type=None,
serialized_options=None,
serialized_start=197,
serialized_end=278,
)
_sym_db.RegisterEnumDescriptor(_KEYTYPE)
KeyType = enum_type_wrapper.EnumTypeWrapper(_KEYTYPE)
UNKNOWN_PUBLIC_KEY_TYPE = 0
NIST256p = 1
NIST384p = 2
SECP256k1 = 3
_CERTIFICATE_VERSION = _descriptor.EnumDescriptor(
name='Version',
full_name='legacy_pb.Certificate.Version',
filename=None,
file=DESCRIPTOR,
values=[
_descriptor.EnumValueDescriptor(
name='UNKNOWN_VERSION', index=0, number=0,
serialized_options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='_0_0_1', index=1, number=1,
serialized_options=None,
type=None),
],
containing_type=None,
serialized_options=None,
serialized_start=153,
serialized_end=195,
)
_sym_db.RegisterEnumDescriptor(_CERTIFICATE_VERSION)
_CERTIFICATE = _descriptor.Descriptor(
name='Certificate',
full_name='legacy_pb.Certificate',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='version', full_name='legacy_pb.Certificate.version', index=0,
number=1, type=14, cpp_type=8, label=2,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='keyType', full_name='legacy_pb.Certificate.keyType', index=1,
number=2, type=14, cpp_type=8, label=2,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='publicKey', full_name='legacy_pb.Certificate.publicKey', index=2,
number=4, type=12, cpp_type=9, label=2,
has_default_value=False, default_value=_b(""),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
_CERTIFICATE_VERSION,
],
serialized_options=None,
is_extendable=False,
syntax='proto2',
extension_ranges=[],
oneofs=[
],
serialized_start=33,
serialized_end=195,
)
_CERTIFICATE.fields_by_name['version'].enum_type = _CERTIFICATE_VERSION
_CERTIFICATE.fields_by_name['keyType'].enum_type = _KEYTYPE
_CERTIFICATE_VERSION.containing_type = _CERTIFICATE
DESCRIPTOR.message_types_by_name['Certificate'] = _CERTIFICATE
DESCRIPTOR.enum_types_by_name['KeyType'] = _KEYTYPE
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
Certificate = _reflection.GeneratedProtocolMessageType('Certificate', (_message.Message,), dict(
DESCRIPTOR = _CERTIFICATE,
__module__ = 'certificate_pb2'
# @@protoc_insertion_point(class_scope:legacy_pb.Certificate)
))
_sym_db.RegisterMessage(Certificate)
# @@protoc_insertion_point(module_scope)
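
Although generated, the module is used like any proto2 message class; all three Certificate fields are required, so even a minimal sketch must set each of them (the key bytes below are placeholders):

cert = Certificate(
    version=Certificate._0_0_1,        # nested Version enum value
    keyType=SECP256k1,                 # module-level KeyType value
    publicKey=b"\x04" + b"\x00" * 64,  # hypothetical public key bytes
)
assert Certificate.FromString(cert.SerializeToString()).keyType == SECP256k1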


@@ -1,148 +0,0 @@
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: fee.proto
import sys
_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
DESCRIPTOR = _descriptor.FileDescriptor(
name='fee.proto',
package='legacy_pb',
syntax='proto2',
serialized_options=None,
serialized_pb=_b('\n\tfee.proto\x12\tlegacy_pb\"\xe3\x01\n\x03\x46\x65\x65\x12\'\n\x07version\x18\x01 \x02(\x0e\x32\x16.legacy_pb.Fee.Version\x12)\n\x08\x63urrency\x18\x02 \x02(\x0e\x32\x17.legacy_pb.Fee.Currency\x12\x0f\n\x07\x61\x64\x64ress\x18\x03 \x02(\x0c\x12\x0e\n\x06\x61mount\x18\x04 \x02(\x02\"*\n\x07Version\x12\x13\n\x0fUNKNOWN_VERSION\x10\x00\x12\n\n\x06_0_0_1\x10\x01\";\n\x08\x43urrency\x12\x14\n\x10UNKNOWN_CURRENCY\x10\x00\x12\x07\n\x03LBC\x10\x01\x12\x07\n\x03\x42TC\x10\x02\x12\x07\n\x03USD\x10\x03')
)
_FEE_VERSION = _descriptor.EnumDescriptor(
name='Version',
full_name='legacy_pb.Fee.Version',
filename=None,
file=DESCRIPTOR,
values=[
_descriptor.EnumValueDescriptor(
name='UNKNOWN_VERSION', index=0, number=0,
serialized_options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='_0_0_1', index=1, number=1,
serialized_options=None,
type=None),
],
containing_type=None,
serialized_options=None,
serialized_start=149,
serialized_end=191,
)
_sym_db.RegisterEnumDescriptor(_FEE_VERSION)
_FEE_CURRENCY = _descriptor.EnumDescriptor(
name='Currency',
full_name='legacy_pb.Fee.Currency',
filename=None,
file=DESCRIPTOR,
values=[
_descriptor.EnumValueDescriptor(
name='UNKNOWN_CURRENCY', index=0, number=0,
serialized_options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='LBC', index=1, number=1,
serialized_options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='BTC', index=2, number=2,
serialized_options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='USD', index=3, number=3,
serialized_options=None,
type=None),
],
containing_type=None,
serialized_options=None,
serialized_start=193,
serialized_end=252,
)
_sym_db.RegisterEnumDescriptor(_FEE_CURRENCY)
_FEE = _descriptor.Descriptor(
name='Fee',
full_name='legacy_pb.Fee',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='version', full_name='legacy_pb.Fee.version', index=0,
number=1, type=14, cpp_type=8, label=2,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='currency', full_name='legacy_pb.Fee.currency', index=1,
number=2, type=14, cpp_type=8, label=2,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='address', full_name='legacy_pb.Fee.address', index=2,
number=3, type=12, cpp_type=9, label=2,
has_default_value=False, default_value=_b(""),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='amount', full_name='legacy_pb.Fee.amount', index=3,
number=4, type=2, cpp_type=6, label=2,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
_FEE_VERSION,
_FEE_CURRENCY,
],
serialized_options=None,
is_extendable=False,
syntax='proto2',
extension_ranges=[],
oneofs=[
],
serialized_start=25,
serialized_end=252,
)
_FEE.fields_by_name['version'].enum_type = _FEE_VERSION
_FEE.fields_by_name['currency'].enum_type = _FEE_CURRENCY
_FEE_VERSION.containing_type = _FEE
_FEE_CURRENCY.containing_type = _FEE
DESCRIPTOR.message_types_by_name['Fee'] = _FEE
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
Fee = _reflection.GeneratedProtocolMessageType('Fee', (_message.Message,), dict(
DESCRIPTOR = _FEE,
__module__ = 'fee_pb2'
# @@protoc_insertion_point(class_scope:legacy_pb.Fee)
))
_sym_db.RegisterMessage(Fee)
# @@protoc_insertion_point(module_scope)


@@ -1,158 +0,0 @@
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: legacy_claim.proto
import sys
_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
from . import stream_pb2 as stream__pb2
from . import certificate_pb2 as certificate__pb2
from . import signature_pb2 as signature__pb2
DESCRIPTOR = _descriptor.FileDescriptor(
name='legacy_claim.proto',
package='legacy_pb',
syntax='proto2',
serialized_options=None,
serialized_pb=_b('\n\x12legacy_claim.proto\x12\tlegacy_pb\x1a\x0cstream.proto\x1a\x11\x63\x65rtificate.proto\x1a\x0fsignature.proto\"\xd9\x02\n\x05\x43laim\x12)\n\x07version\x18\x01 \x02(\x0e\x32\x18.legacy_pb.Claim.Version\x12-\n\tclaimType\x18\x02 \x02(\x0e\x32\x1a.legacy_pb.Claim.ClaimType\x12!\n\x06stream\x18\x03 \x01(\x0b\x32\x11.legacy_pb.Stream\x12+\n\x0b\x63\x65rtificate\x18\x04 \x01(\x0b\x32\x16.legacy_pb.Certificate\x12\x30\n\x12publisherSignature\x18\x05 \x01(\x0b\x32\x14.legacy_pb.Signature\"*\n\x07Version\x12\x13\n\x0fUNKNOWN_VERSION\x10\x00\x12\n\n\x06_0_0_1\x10\x01\"H\n\tClaimType\x12\x16\n\x12UNKNOWN_CLAIM_TYPE\x10\x00\x12\x0e\n\nstreamType\x10\x01\x12\x13\n\x0f\x63\x65rtificateType\x10\x02')
,
dependencies=[stream__pb2.DESCRIPTOR,certificate__pb2.DESCRIPTOR,signature__pb2.DESCRIPTOR,])
_CLAIM_VERSION = _descriptor.EnumDescriptor(
name='Version',
full_name='legacy_pb.Claim.Version',
filename=None,
file=DESCRIPTOR,
values=[
_descriptor.EnumValueDescriptor(
name='UNKNOWN_VERSION', index=0, number=0,
serialized_options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='_0_0_1', index=1, number=1,
serialized_options=None,
type=None),
],
containing_type=None,
serialized_options=None,
serialized_start=313,
serialized_end=355,
)
_sym_db.RegisterEnumDescriptor(_CLAIM_VERSION)
_CLAIM_CLAIMTYPE = _descriptor.EnumDescriptor(
name='ClaimType',
full_name='legacy_pb.Claim.ClaimType',
filename=None,
file=DESCRIPTOR,
values=[
_descriptor.EnumValueDescriptor(
name='UNKNOWN_CLAIM_TYPE', index=0, number=0,
serialized_options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='streamType', index=1, number=1,
serialized_options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='certificateType', index=2, number=2,
serialized_options=None,
type=None),
],
containing_type=None,
serialized_options=None,
serialized_start=357,
serialized_end=429,
)
_sym_db.RegisterEnumDescriptor(_CLAIM_CLAIMTYPE)
_CLAIM = _descriptor.Descriptor(
name='Claim',
full_name='legacy_pb.Claim',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='version', full_name='legacy_pb.Claim.version', index=0,
number=1, type=14, cpp_type=8, label=2,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='claimType', full_name='legacy_pb.Claim.claimType', index=1,
number=2, type=14, cpp_type=8, label=2,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='stream', full_name='legacy_pb.Claim.stream', index=2,
number=3, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='certificate', full_name='legacy_pb.Claim.certificate', index=3,
number=4, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='publisherSignature', full_name='legacy_pb.Claim.publisherSignature', index=4,
number=5, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
_CLAIM_VERSION,
_CLAIM_CLAIMTYPE,
],
serialized_options=None,
is_extendable=False,
syntax='proto2',
extension_ranges=[],
oneofs=[
],
serialized_start=84,
serialized_end=429,
)
_CLAIM.fields_by_name['version'].enum_type = _CLAIM_VERSION
_CLAIM.fields_by_name['claimType'].enum_type = _CLAIM_CLAIMTYPE
_CLAIM.fields_by_name['stream'].message_type = stream__pb2._STREAM
_CLAIM.fields_by_name['certificate'].message_type = certificate__pb2._CERTIFICATE
_CLAIM.fields_by_name['publisherSignature'].message_type = signature__pb2._SIGNATURE
_CLAIM_VERSION.containing_type = _CLAIM
_CLAIM_CLAIMTYPE.containing_type = _CLAIM
DESCRIPTOR.message_types_by_name['Claim'] = _CLAIM
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
Claim = _reflection.GeneratedProtocolMessageType('Claim', (_message.Message,), dict(
DESCRIPTOR = _CLAIM,
__module__ = 'legacy_claim_pb2'
# @@protoc_insertion_point(class_scope:legacy_pb.Claim)
))
_sym_db.RegisterMessage(Claim)
# @@protoc_insertion_point(module_scope)

File diff suppressed because one or more lines are too long


@@ -1,118 +0,0 @@
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: signature.proto
import sys
_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
from . import certificate_pb2 as certificate__pb2
DESCRIPTOR = _descriptor.FileDescriptor(
name='signature.proto',
package='legacy_pb',
syntax='proto2',
serialized_options=None,
serialized_pb=_b('\n\x0fsignature.proto\x12\tlegacy_pb\x1a\x11\x63\x65rtificate.proto\"\xbb\x01\n\tSignature\x12-\n\x07version\x18\x01 \x02(\x0e\x32\x1c.legacy_pb.Signature.Version\x12)\n\rsignatureType\x18\x02 \x02(\x0e\x32\x12.legacy_pb.KeyType\x12\x11\n\tsignature\x18\x03 \x02(\x0c\x12\x15\n\rcertificateId\x18\x04 \x02(\x0c\"*\n\x07Version\x12\x13\n\x0fUNKNOWN_VERSION\x10\x00\x12\n\n\x06_0_0_1\x10\x01')
,
dependencies=[certificate__pb2.DESCRIPTOR,])
_SIGNATURE_VERSION = _descriptor.EnumDescriptor(
name='Version',
full_name='legacy_pb.Signature.Version',
filename=None,
file=DESCRIPTOR,
values=[
_descriptor.EnumValueDescriptor(
name='UNKNOWN_VERSION', index=0, number=0,
serialized_options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='_0_0_1', index=1, number=1,
serialized_options=None,
type=None),
],
containing_type=None,
serialized_options=None,
serialized_start=195,
serialized_end=237,
)
_sym_db.RegisterEnumDescriptor(_SIGNATURE_VERSION)
_SIGNATURE = _descriptor.Descriptor(
name='Signature',
full_name='legacy_pb.Signature',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='version', full_name='legacy_pb.Signature.version', index=0,
number=1, type=14, cpp_type=8, label=2,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='signatureType', full_name='legacy_pb.Signature.signatureType', index=1,
number=2, type=14, cpp_type=8, label=2,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='signature', full_name='legacy_pb.Signature.signature', index=2,
number=3, type=12, cpp_type=9, label=2,
has_default_value=False, default_value=_b(""),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='certificateId', full_name='legacy_pb.Signature.certificateId', index=3,
number=4, type=12, cpp_type=9, label=2,
has_default_value=False, default_value=_b(""),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
_SIGNATURE_VERSION,
],
serialized_options=None,
is_extendable=False,
syntax='proto2',
extension_ranges=[],
oneofs=[
],
serialized_start=50,
serialized_end=237,
)
_SIGNATURE.fields_by_name['version'].enum_type = _SIGNATURE_VERSION
_SIGNATURE.fields_by_name['signatureType'].enum_type = certificate__pb2._KEYTYPE
_SIGNATURE_VERSION.containing_type = _SIGNATURE
DESCRIPTOR.message_types_by_name['Signature'] = _SIGNATURE
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
Signature = _reflection.GeneratedProtocolMessageType('Signature', (_message.Message,), dict(
DESCRIPTOR = _SIGNATURE,
__module__ = 'signature_pb2'
# @@protoc_insertion_point(class_scope:legacy_pb.Signature)
))
_sym_db.RegisterMessage(Signature)
# @@protoc_insertion_point(module_scope)


@@ -1,140 +0,0 @@
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: source.proto
import sys
_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
DESCRIPTOR = _descriptor.FileDescriptor(
name='source.proto',
package='legacy_pb',
syntax='proto2',
serialized_options=None,
serialized_pb=_b('\n\x0csource.proto\x12\tlegacy_pb\"\xf2\x01\n\x06Source\x12*\n\x07version\x18\x01 \x02(\x0e\x32\x19.legacy_pb.Source.Version\x12\x31\n\nsourceType\x18\x02 \x02(\x0e\x32\x1d.legacy_pb.Source.SourceTypes\x12\x0e\n\x06source\x18\x03 \x02(\x0c\x12\x13\n\x0b\x63ontentType\x18\x04 \x02(\t\"*\n\x07Version\x12\x13\n\x0fUNKNOWN_VERSION\x10\x00\x12\n\n\x06_0_0_1\x10\x01\"8\n\x0bSourceTypes\x12\x17\n\x13UNKNOWN_SOURCE_TYPE\x10\x00\x12\x10\n\x0clbry_sd_hash\x10\x01')
)
_SOURCE_VERSION = _descriptor.EnumDescriptor(
name='Version',
full_name='legacy_pb.Source.Version',
filename=None,
file=DESCRIPTOR,
values=[
_descriptor.EnumValueDescriptor(
name='UNKNOWN_VERSION', index=0, number=0,
serialized_options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='_0_0_1', index=1, number=1,
serialized_options=None,
type=None),
],
containing_type=None,
serialized_options=None,
serialized_start=170,
serialized_end=212,
)
_sym_db.RegisterEnumDescriptor(_SOURCE_VERSION)
_SOURCE_SOURCETYPES = _descriptor.EnumDescriptor(
name='SourceTypes',
full_name='legacy_pb.Source.SourceTypes',
filename=None,
file=DESCRIPTOR,
values=[
_descriptor.EnumValueDescriptor(
name='UNKNOWN_SOURCE_TYPE', index=0, number=0,
serialized_options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='lbry_sd_hash', index=1, number=1,
serialized_options=None,
type=None),
],
containing_type=None,
serialized_options=None,
serialized_start=214,
serialized_end=270,
)
_sym_db.RegisterEnumDescriptor(_SOURCE_SOURCETYPES)
_SOURCE = _descriptor.Descriptor(
name='Source',
full_name='legacy_pb.Source',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='version', full_name='legacy_pb.Source.version', index=0,
number=1, type=14, cpp_type=8, label=2,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='sourceType', full_name='legacy_pb.Source.sourceType', index=1,
number=2, type=14, cpp_type=8, label=2,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='source', full_name='legacy_pb.Source.source', index=2,
number=3, type=12, cpp_type=9, label=2,
has_default_value=False, default_value=_b(""),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='contentType', full_name='legacy_pb.Source.contentType', index=3,
number=4, type=9, cpp_type=9, label=2,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
_SOURCE_VERSION,
_SOURCE_SOURCETYPES,
],
serialized_options=None,
is_extendable=False,
syntax='proto2',
extension_ranges=[],
oneofs=[
],
serialized_start=28,
serialized_end=270,
)
_SOURCE.fields_by_name['version'].enum_type = _SOURCE_VERSION
_SOURCE.fields_by_name['sourceType'].enum_type = _SOURCE_SOURCETYPES
_SOURCE_VERSION.containing_type = _SOURCE
_SOURCE_SOURCETYPES.containing_type = _SOURCE
DESCRIPTOR.message_types_by_name['Source'] = _SOURCE
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
Source = _reflection.GeneratedProtocolMessageType('Source', (_message.Message,), dict(
DESCRIPTOR = _SOURCE,
__module__ = 'source_pb2'
# @@protoc_insertion_point(class_scope:legacy_pb.Source)
))
_sym_db.RegisterMessage(Source)
# @@protoc_insertion_point(module_scope)


@@ -1,113 +0,0 @@
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: stream.proto
import sys
_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
from . import metadata_pb2 as metadata__pb2
from . import source_pb2 as source__pb2
DESCRIPTOR = _descriptor.FileDescriptor(
name='stream.proto',
package='legacy_pb',
syntax='proto2',
serialized_options=None,
serialized_pb=_b('\n\x0cstream.proto\x12\tlegacy_pb\x1a\x0emetadata.proto\x1a\x0csource.proto\"\xaa\x01\n\x06Stream\x12*\n\x07version\x18\x01 \x02(\x0e\x32\x19.legacy_pb.Stream.Version\x12%\n\x08metadata\x18\x02 \x02(\x0b\x32\x13.legacy_pb.Metadata\x12!\n\x06source\x18\x03 \x02(\x0b\x32\x11.legacy_pb.Source\"*\n\x07Version\x12\x13\n\x0fUNKNOWN_VERSION\x10\x00\x12\n\n\x06_0_0_1\x10\x01')
,
dependencies=[metadata__pb2.DESCRIPTOR,source__pb2.DESCRIPTOR,])
_STREAM_VERSION = _descriptor.EnumDescriptor(
name='Version',
full_name='legacy_pb.Stream.Version',
filename=None,
file=DESCRIPTOR,
values=[
_descriptor.EnumValueDescriptor(
name='UNKNOWN_VERSION', index=0, number=0,
serialized_options=None,
type=None),
_descriptor.EnumValueDescriptor(
name='_0_0_1', index=1, number=1,
serialized_options=None,
type=None),
],
containing_type=None,
serialized_options=None,
serialized_start=186,
serialized_end=228,
)
_sym_db.RegisterEnumDescriptor(_STREAM_VERSION)
_STREAM = _descriptor.Descriptor(
name='Stream',
full_name='legacy_pb.Stream',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='version', full_name='legacy_pb.Stream.version', index=0,
number=1, type=14, cpp_type=8, label=2,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='metadata', full_name='legacy_pb.Stream.metadata', index=1,
number=2, type=11, cpp_type=10, label=2,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='source', full_name='legacy_pb.Stream.source', index=2,
number=3, type=11, cpp_type=10, label=2,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
_STREAM_VERSION,
],
serialized_options=None,
is_extendable=False,
syntax='proto2',
extension_ranges=[],
oneofs=[
],
serialized_start=58,
serialized_end=228,
)
_STREAM.fields_by_name['version'].enum_type = _STREAM_VERSION
_STREAM.fields_by_name['metadata'].message_type = metadata__pb2._METADATA
_STREAM.fields_by_name['source'].message_type = source__pb2._SOURCE
_STREAM_VERSION.containing_type = _STREAM
DESCRIPTOR.message_types_by_name['Stream'] = _STREAM
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
Stream = _reflection.GeneratedProtocolMessageType('Stream', (_message.Message,), dict(
DESCRIPTOR = _STREAM,
__module__ = 'stream_pb2'
# @@protoc_insertion_point(class_scope:legacy_pb.Stream)
))
_sym_db.RegisterMessage(Stream)
# @@protoc_insertion_point(module_scope)

File diff suppressed because one or more lines are too long


@@ -1,960 +0,0 @@
# -*- coding: utf-8 -*-
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: hub.proto
"""Generated protocol buffer code."""
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
from . import result_pb2 as result__pb2
DESCRIPTOR = _descriptor.FileDescriptor(
name='hub.proto',
package='pb',
syntax='proto3',
serialized_options=b'Z$github.com/lbryio/hub/protobuf/go/pb',
create_key=_descriptor._internal_create_key,
  serialized_pb=b'\n\thub.proto\x12\x02pb\x1a\x0cresult.proto\"\x0e\n\x0c\x45mptyMessage\".\n\rServerMessage\x12\x0f\n\x07\x61\x64\x64ress\x18\x01 \x01(\t\x12\x0c\n\x04port\x18\x02 \x01(\t\"N\n\x0cHelloMessage\x12\x0c\n\x04port\x18\x01 \x01(\t\x12\x0c\n\x04host\x18\x02 \x01(\t\x12\"\n\x07servers\x18\x03 \x03(\x0b\x32\x11.pb.ServerMessage\"0\n\x0fInvertibleField\x12\x0e\n\x06invert\x18\x01 \x01(\x08\x12\r\n\x05value\x18\x02 \x03(\t\"\x1c\n\x0bStringValue\x12\r\n\x05value\x18\x01 \x01(\t\"\x1a\n\tBoolValue\x12\r\n\x05value\x18\x01 \x01(\x08\"\x1c\n\x0bUInt32Value\x12\r\n\x05value\x18\x01 \x01(\r\"j\n\nRangeField\x12\x1d\n\x02op\x18\x01 \x01(\x0e\x32\x11.pb.RangeField.Op\x12\r\n\x05value\x18\x02 \x03(\x05\".\n\x02Op\x12\x06\n\x02\x45Q\x10\x00\x12\x07\n\x03LTE\x10\x01\x12\x07\n\x03GTE\x10\x02\x12\x06\n\x02LT\x10\x03\x12\x06\n\x02GT\x10\x04\"\x8e\x0c\n\rSearchRequest\x12%\n\x08\x63laim_id\x18\x01 \x01(\x0b\x32\x13.pb.InvertibleField\x12\'\n\nchannel_id\x18\x02 \x01(\x0b\x32\x13.pb.InvertibleField\x12\x0c\n\x04text\x18\x03 \x01(\t\x12\r\n\x05limit\x18\x04 \x01(\x05\x12\x10\n\x08order_by\x18\x05 \x03(\t\x12\x0e\n\x06offset\x18\x06 \x01(\r\x12\x16\n\x0eis_controlling\x18\x07 \x01(\x08\x12\x1d\n\x15last_take_over_height\x18\x08 \x01(\t\x12\x12\n\nclaim_name\x18\t \x01(\t\x12\x17\n\x0fnormalized_name\x18\n \x01(\t\x12#\n\x0btx_position\x18\x0b \x03(\x0b\x32\x0e.pb.RangeField\x12\x1e\n\x06\x61mount\x18\x0c \x03(\x0b\x32\x0e.pb.RangeField\x12!\n\ttimestamp\x18\r \x03(\x0b\x32\x0e.pb.RangeField\x12*\n\x12\x63reation_timestamp\x18\x0e \x03(\x0b\x32\x0e.pb.RangeField\x12\x1e\n\x06height\x18\x0f \x03(\x0b\x32\x0e.pb.RangeField\x12\'\n\x0f\x63reation_height\x18\x10 \x03(\x0b\x32\x0e.pb.RangeField\x12)\n\x11\x61\x63tivation_height\x18\x11 \x03(\x0b\x32\x0e.pb.RangeField\x12)\n\x11\x65xpiration_height\x18\x12 \x03(\x0b\x32\x0e.pb.RangeField\x12$\n\x0crelease_time\x18\x13 \x03(\x0b\x32\x0e.pb.RangeField\x12\x11\n\tshort_url\x18\x14 \x01(\t\x12\x15\n\rcanonical_url\x18\x15 \x01(\t\x12\r\n\x05title\x18\x16 \x01(\t\x12\x0e\n\x06\x61uthor\x18\x17 \x01(\t\x12\x13\n\x0b\x64\x65scription\x18\x18 \x01(\t\x12\x12\n\nclaim_type\x18\x19 \x03(\t\x12$\n\x0crepost_count\x18\x1a \x03(\x0b\x32\x0e.pb.RangeField\x12\x13\n\x0bstream_type\x18\x1b \x03(\t\x12\x12\n\nmedia_type\x18\x1c \x03(\t\x12\"\n\nfee_amount\x18\x1d \x03(\x0b\x32\x0e.pb.RangeField\x12\x14\n\x0c\x66\x65\x65_currency\x18\x1e \x01(\t\x12 \n\x08\x64uration\x18\x1f \x03(\x0b\x32\x0e.pb.RangeField\x12\x19\n\x11reposted_claim_id\x18 \x01(\t\x12#\n\x0b\x63\x65nsor_type\x18! \x03(\x0b\x32\x0e.pb.RangeField\x12\x19\n\x11\x63laims_in_channel\x18\" \x01(\t\x12)\n\x12is_signature_valid\x18$ \x01(\x0b\x32\r.pb.BoolValue\x12(\n\x10\x65\x66\x66\x65\x63tive_amount\x18% \x03(\x0b\x32\x0e.pb.RangeField\x12&\n\x0esupport_amount\x18& \x03(\x0b\x32\x0e.pb.RangeField\x12&\n\x0etrending_score\x18\' \x03(\x0b\x32\x0e.pb.RangeField\x12\r\n\x05tx_id\x18+ \x01(\t\x12 \n\x07tx_nout\x18, \x01(\x0b\x32\x0f.pb.UInt32Value\x12\x11\n\tsignature\x18- \x01(\t\x12\x18\n\x10signature_digest\x18. \x01(\t\x12\x18\n\x10public_key_bytes\x18/ \x01(\t\x12\x15\n\rpublic_key_id\x18\x30 \x01(\t\x12\x10\n\x08\x61ny_tags\x18\x31 \x03(\t\x12\x10\n\x08\x61ll_tags\x18\x32 \x03(\t\x12\x10\n\x08not_tags\x18\x33 \x03(\t\x12\x1d\n\x15has_channel_signature\x18\x34 \x01(\x08\x12!\n\nhas_source\x18\x35 \x01(\x0b\x32\r.pb.BoolValue\x12 \n\x18limit_claims_per_channel\x18\x36 \x01(\x05\x12\x15\n\rany_languages\x18\x37 \x03(\t\x12\x15\n\rall_languages\x18\x38 \x03(\t\x12\x19\n\x11remove_duplicates\x18\x39 \x01(\x08\x12\x11\n\tno_totals\x18: \x01(\x08\x12\x0f\n\x07sd_hash\x18; \x01(\t2\x88\x03\n\x03Hub\x12*\n\x06Search\x12\x11.pb.SearchRequest\x1a\x0b.pb.Outputs\"\x00\x12+\n\x04Ping\x12\x10.pb.EmptyMessage\x1a\x0f.pb.StringValue\"\x00\x12-\n\x05Hello\x12\x10.pb.HelloMessage\x1a\x10.pb.HelloMessage\"\x00\x12/\n\x07\x41\x64\x64Peer\x12\x11.pb.ServerMessage\x1a\x0f.pb.StringValue\"\x00\x12\x35\n\rPeerSubscribe\x12\x11.pb.ServerMessage\x1a\x0f.pb.StringValue\"\x00\x12.\n\x07Version\x12\x10.pb.EmptyMessage\x1a\x0f.pb.StringValue\"\x00\x12/\n\x08\x46\x65\x61tures\x12\x10.pb.EmptyMessage\x1a\x0f.pb.StringValue\"\x00\x12\x30\n\tBroadcast\x12\x10.pb.EmptyMessage\x1a\x0f.pb.UInt32Value\"\x00\x42&Z$github.com/lbryio/hub/protobuf/go/pbb\x06proto3'
,
dependencies=[result__pb2.DESCRIPTOR,])
_RANGEFIELD_OP = _descriptor.EnumDescriptor(
name='Op',
full_name='pb.RangeField.Op',
filename=None,
file=DESCRIPTOR,
create_key=_descriptor._internal_create_key,
values=[
_descriptor.EnumValueDescriptor(
name='EQ', index=0, number=0,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='LTE', index=1, number=1,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='GTE', index=2, number=2,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='LT', index=3, number=3,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='GT', index=4, number=4,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
],
containing_type=None,
serialized_options=None,
serialized_start=373,
serialized_end=419,
)
_sym_db.RegisterEnumDescriptor(_RANGEFIELD_OP)
_EMPTYMESSAGE = _descriptor.Descriptor(
name='EmptyMessage',
full_name='pb.EmptyMessage',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=31,
serialized_end=45,
)
_SERVERMESSAGE = _descriptor.Descriptor(
name='ServerMessage',
full_name='pb.ServerMessage',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='address', full_name='pb.ServerMessage.address', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='port', full_name='pb.ServerMessage.port', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=47,
serialized_end=93,
)
_HELLOMESSAGE = _descriptor.Descriptor(
name='HelloMessage',
full_name='pb.HelloMessage',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='port', full_name='pb.HelloMessage.port', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='host', full_name='pb.HelloMessage.host', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='servers', full_name='pb.HelloMessage.servers', index=2,
number=3, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=95,
serialized_end=173,
)
_INVERTIBLEFIELD = _descriptor.Descriptor(
name='InvertibleField',
full_name='pb.InvertibleField',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='invert', full_name='pb.InvertibleField.invert', index=0,
number=1, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='value', full_name='pb.InvertibleField.value', index=1,
number=2, type=9, cpp_type=9, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=175,
serialized_end=223,
)
_STRINGVALUE = _descriptor.Descriptor(
name='StringValue',
full_name='pb.StringValue',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='value', full_name='pb.StringValue.value', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=225,
serialized_end=253,
)
_BOOLVALUE = _descriptor.Descriptor(
name='BoolValue',
full_name='pb.BoolValue',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='value', full_name='pb.BoolValue.value', index=0,
number=1, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=255,
serialized_end=281,
)
_UINT32VALUE = _descriptor.Descriptor(
name='UInt32Value',
full_name='pb.UInt32Value',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='value', full_name='pb.UInt32Value.value', index=0,
number=1, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=283,
serialized_end=311,
)
_RANGEFIELD = _descriptor.Descriptor(
name='RangeField',
full_name='pb.RangeField',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='op', full_name='pb.RangeField.op', index=0,
number=1, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='value', full_name='pb.RangeField.value', index=1,
number=2, type=5, cpp_type=1, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
_RANGEFIELD_OP,
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=313,
serialized_end=419,
)
_SEARCHREQUEST = _descriptor.Descriptor(
name='SearchRequest',
full_name='pb.SearchRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='claim_id', full_name='pb.SearchRequest.claim_id', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='channel_id', full_name='pb.SearchRequest.channel_id', index=1,
number=2, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='text', full_name='pb.SearchRequest.text', index=2,
number=3, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='limit', full_name='pb.SearchRequest.limit', index=3,
number=4, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='order_by', full_name='pb.SearchRequest.order_by', index=4,
number=5, type=9, cpp_type=9, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='offset', full_name='pb.SearchRequest.offset', index=5,
number=6, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='is_controlling', full_name='pb.SearchRequest.is_controlling', index=6,
number=7, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='last_take_over_height', full_name='pb.SearchRequest.last_take_over_height', index=7,
number=8, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='claim_name', full_name='pb.SearchRequest.claim_name', index=8,
number=9, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='normalized_name', full_name='pb.SearchRequest.normalized_name', index=9,
number=10, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='tx_position', full_name='pb.SearchRequest.tx_position', index=10,
number=11, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='amount', full_name='pb.SearchRequest.amount', index=11,
number=12, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='timestamp', full_name='pb.SearchRequest.timestamp', index=12,
number=13, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='creation_timestamp', full_name='pb.SearchRequest.creation_timestamp', index=13,
number=14, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='height', full_name='pb.SearchRequest.height', index=14,
number=15, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='creation_height', full_name='pb.SearchRequest.creation_height', index=15,
number=16, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='activation_height', full_name='pb.SearchRequest.activation_height', index=16,
number=17, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='expiration_height', full_name='pb.SearchRequest.expiration_height', index=17,
number=18, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='release_time', full_name='pb.SearchRequest.release_time', index=18,
number=19, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='short_url', full_name='pb.SearchRequest.short_url', index=19,
number=20, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='canonical_url', full_name='pb.SearchRequest.canonical_url', index=20,
number=21, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='title', full_name='pb.SearchRequest.title', index=21,
number=22, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='author', full_name='pb.SearchRequest.author', index=22,
number=23, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='description', full_name='pb.SearchRequest.description', index=23,
number=24, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='claim_type', full_name='pb.SearchRequest.claim_type', index=24,
number=25, type=9, cpp_type=9, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='repost_count', full_name='pb.SearchRequest.repost_count', index=25,
number=26, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='stream_type', full_name='pb.SearchRequest.stream_type', index=26,
number=27, type=9, cpp_type=9, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='media_type', full_name='pb.SearchRequest.media_type', index=27,
number=28, type=9, cpp_type=9, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='fee_amount', full_name='pb.SearchRequest.fee_amount', index=28,
number=29, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='fee_currency', full_name='pb.SearchRequest.fee_currency', index=29,
number=30, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='duration', full_name='pb.SearchRequest.duration', index=30,
number=31, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='reposted_claim_id', full_name='pb.SearchRequest.reposted_claim_id', index=31,
number=32, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='censor_type', full_name='pb.SearchRequest.censor_type', index=32,
number=33, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='claims_in_channel', full_name='pb.SearchRequest.claims_in_channel', index=33,
number=34, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='is_signature_valid', full_name='pb.SearchRequest.is_signature_valid', index=34,
number=36, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='effective_amount', full_name='pb.SearchRequest.effective_amount', index=35,
number=37, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='support_amount', full_name='pb.SearchRequest.support_amount', index=36,
number=38, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='trending_score', full_name='pb.SearchRequest.trending_score', index=37,
number=39, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='tx_id', full_name='pb.SearchRequest.tx_id', index=38,
number=43, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='tx_nout', full_name='pb.SearchRequest.tx_nout', index=39,
number=44, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='signature', full_name='pb.SearchRequest.signature', index=40,
number=45, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='signature_digest', full_name='pb.SearchRequest.signature_digest', index=41,
number=46, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='public_key_bytes', full_name='pb.SearchRequest.public_key_bytes', index=42,
number=47, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='public_key_id', full_name='pb.SearchRequest.public_key_id', index=43,
number=48, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='any_tags', full_name='pb.SearchRequest.any_tags', index=44,
number=49, type=9, cpp_type=9, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='all_tags', full_name='pb.SearchRequest.all_tags', index=45,
number=50, type=9, cpp_type=9, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='not_tags', full_name='pb.SearchRequest.not_tags', index=46,
number=51, type=9, cpp_type=9, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='has_channel_signature', full_name='pb.SearchRequest.has_channel_signature', index=47,
number=52, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='has_source', full_name='pb.SearchRequest.has_source', index=48,
number=53, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='limit_claims_per_channel', full_name='pb.SearchRequest.limit_claims_per_channel', index=49,
number=54, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='any_languages', full_name='pb.SearchRequest.any_languages', index=50,
number=55, type=9, cpp_type=9, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='all_languages', full_name='pb.SearchRequest.all_languages', index=51,
number=56, type=9, cpp_type=9, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='remove_duplicates', full_name='pb.SearchRequest.remove_duplicates', index=52,
number=57, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='no_totals', full_name='pb.SearchRequest.no_totals', index=53,
number=58, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='sd_hash', full_name='pb.SearchRequest.sd_hash', index=54,
number=59, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=422,
serialized_end=1972,
)
_HELLOMESSAGE.fields_by_name['servers'].message_type = _SERVERMESSAGE
_RANGEFIELD.fields_by_name['op'].enum_type = _RANGEFIELD_OP
_RANGEFIELD_OP.containing_type = _RANGEFIELD
_SEARCHREQUEST.fields_by_name['claim_id'].message_type = _INVERTIBLEFIELD
_SEARCHREQUEST.fields_by_name['channel_id'].message_type = _INVERTIBLEFIELD
_SEARCHREQUEST.fields_by_name['tx_position'].message_type = _RANGEFIELD
_SEARCHREQUEST.fields_by_name['amount'].message_type = _RANGEFIELD
_SEARCHREQUEST.fields_by_name['timestamp'].message_type = _RANGEFIELD
_SEARCHREQUEST.fields_by_name['creation_timestamp'].message_type = _RANGEFIELD
_SEARCHREQUEST.fields_by_name['height'].message_type = _RANGEFIELD
_SEARCHREQUEST.fields_by_name['creation_height'].message_type = _RANGEFIELD
_SEARCHREQUEST.fields_by_name['activation_height'].message_type = _RANGEFIELD
_SEARCHREQUEST.fields_by_name['expiration_height'].message_type = _RANGEFIELD
_SEARCHREQUEST.fields_by_name['release_time'].message_type = _RANGEFIELD
_SEARCHREQUEST.fields_by_name['repost_count'].message_type = _RANGEFIELD
_SEARCHREQUEST.fields_by_name['fee_amount'].message_type = _RANGEFIELD
_SEARCHREQUEST.fields_by_name['duration'].message_type = _RANGEFIELD
_SEARCHREQUEST.fields_by_name['censor_type'].message_type = _RANGEFIELD
_SEARCHREQUEST.fields_by_name['is_signature_valid'].message_type = _BOOLVALUE
_SEARCHREQUEST.fields_by_name['effective_amount'].message_type = _RANGEFIELD
_SEARCHREQUEST.fields_by_name['support_amount'].message_type = _RANGEFIELD
_SEARCHREQUEST.fields_by_name['trending_score'].message_type = _RANGEFIELD
_SEARCHREQUEST.fields_by_name['tx_nout'].message_type = _UINT32VALUE
_SEARCHREQUEST.fields_by_name['has_source'].message_type = _BOOLVALUE
DESCRIPTOR.message_types_by_name['EmptyMessage'] = _EMPTYMESSAGE
DESCRIPTOR.message_types_by_name['ServerMessage'] = _SERVERMESSAGE
DESCRIPTOR.message_types_by_name['HelloMessage'] = _HELLOMESSAGE
DESCRIPTOR.message_types_by_name['InvertibleField'] = _INVERTIBLEFIELD
DESCRIPTOR.message_types_by_name['StringValue'] = _STRINGVALUE
DESCRIPTOR.message_types_by_name['BoolValue'] = _BOOLVALUE
DESCRIPTOR.message_types_by_name['UInt32Value'] = _UINT32VALUE
DESCRIPTOR.message_types_by_name['RangeField'] = _RANGEFIELD
DESCRIPTOR.message_types_by_name['SearchRequest'] = _SEARCHREQUEST
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
EmptyMessage = _reflection.GeneratedProtocolMessageType('EmptyMessage', (_message.Message,), {
'DESCRIPTOR' : _EMPTYMESSAGE,
'__module__' : 'hub_pb2'
# @@protoc_insertion_point(class_scope:pb.EmptyMessage)
})
_sym_db.RegisterMessage(EmptyMessage)
ServerMessage = _reflection.GeneratedProtocolMessageType('ServerMessage', (_message.Message,), {
'DESCRIPTOR' : _SERVERMESSAGE,
'__module__' : 'hub_pb2'
# @@protoc_insertion_point(class_scope:pb.ServerMessage)
})
_sym_db.RegisterMessage(ServerMessage)
HelloMessage = _reflection.GeneratedProtocolMessageType('HelloMessage', (_message.Message,), {
'DESCRIPTOR' : _HELLOMESSAGE,
'__module__' : 'hub_pb2'
# @@protoc_insertion_point(class_scope:pb.HelloMessage)
})
_sym_db.RegisterMessage(HelloMessage)
InvertibleField = _reflection.GeneratedProtocolMessageType('InvertibleField', (_message.Message,), {
'DESCRIPTOR' : _INVERTIBLEFIELD,
'__module__' : 'hub_pb2'
# @@protoc_insertion_point(class_scope:pb.InvertibleField)
})
_sym_db.RegisterMessage(InvertibleField)
StringValue = _reflection.GeneratedProtocolMessageType('StringValue', (_message.Message,), {
'DESCRIPTOR' : _STRINGVALUE,
'__module__' : 'hub_pb2'
# @@protoc_insertion_point(class_scope:pb.StringValue)
})
_sym_db.RegisterMessage(StringValue)
BoolValue = _reflection.GeneratedProtocolMessageType('BoolValue', (_message.Message,), {
'DESCRIPTOR' : _BOOLVALUE,
'__module__' : 'hub_pb2'
# @@protoc_insertion_point(class_scope:pb.BoolValue)
})
_sym_db.RegisterMessage(BoolValue)
UInt32Value = _reflection.GeneratedProtocolMessageType('UInt32Value', (_message.Message,), {
'DESCRIPTOR' : _UINT32VALUE,
'__module__' : 'hub_pb2'
# @@protoc_insertion_point(class_scope:pb.UInt32Value)
})
_sym_db.RegisterMessage(UInt32Value)
RangeField = _reflection.GeneratedProtocolMessageType('RangeField', (_message.Message,), {
'DESCRIPTOR' : _RANGEFIELD,
'__module__' : 'hub_pb2'
# @@protoc_insertion_point(class_scope:pb.RangeField)
})
_sym_db.RegisterMessage(RangeField)
SearchRequest = _reflection.GeneratedProtocolMessageType('SearchRequest', (_message.Message,), {
'DESCRIPTOR' : _SEARCHREQUEST,
'__module__' : 'hub_pb2'
# @@protoc_insertion_point(class_scope:pb.SearchRequest)
})
_sym_db.RegisterMessage(SearchRequest)
DESCRIPTOR._options = None
_HUB = _descriptor.ServiceDescriptor(
name='Hub',
full_name='pb.Hub',
file=DESCRIPTOR,
index=0,
serialized_options=None,
create_key=_descriptor._internal_create_key,
serialized_start=1975,
serialized_end=2367,
methods=[
_descriptor.MethodDescriptor(
name='Search',
full_name='pb.Hub.Search',
index=0,
containing_service=None,
input_type=_SEARCHREQUEST,
output_type=result__pb2._OUTPUTS,
serialized_options=None,
create_key=_descriptor._internal_create_key,
),
_descriptor.MethodDescriptor(
name='Ping',
full_name='pb.Hub.Ping',
index=1,
containing_service=None,
input_type=_EMPTYMESSAGE,
output_type=_STRINGVALUE,
serialized_options=None,
create_key=_descriptor._internal_create_key,
),
_descriptor.MethodDescriptor(
name='Hello',
full_name='pb.Hub.Hello',
index=2,
containing_service=None,
input_type=_HELLOMESSAGE,
output_type=_HELLOMESSAGE,
serialized_options=None,
create_key=_descriptor._internal_create_key,
),
_descriptor.MethodDescriptor(
name='AddPeer',
full_name='pb.Hub.AddPeer',
index=3,
containing_service=None,
input_type=_SERVERMESSAGE,
output_type=_STRINGVALUE,
serialized_options=None,
create_key=_descriptor._internal_create_key,
),
_descriptor.MethodDescriptor(
name='PeerSubscribe',
full_name='pb.Hub.PeerSubscribe',
index=4,
containing_service=None,
input_type=_SERVERMESSAGE,
output_type=_STRINGVALUE,
serialized_options=None,
create_key=_descriptor._internal_create_key,
),
_descriptor.MethodDescriptor(
name='Version',
full_name='pb.Hub.Version',
index=5,
containing_service=None,
input_type=_EMPTYMESSAGE,
output_type=_STRINGVALUE,
serialized_options=None,
create_key=_descriptor._internal_create_key,
),
_descriptor.MethodDescriptor(
name='Features',
full_name='pb.Hub.Features',
index=6,
containing_service=None,
input_type=_EMPTYMESSAGE,
output_type=_STRINGVALUE,
serialized_options=None,
create_key=_descriptor._internal_create_key,
),
_descriptor.MethodDescriptor(
name='Broadcast',
full_name='pb.Hub.Broadcast',
index=7,
containing_service=None,
input_type=_EMPTYMESSAGE,
output_type=_UINT32VALUE,
serialized_options=None,
create_key=_descriptor._internal_create_key,
),
])
_sym_db.RegisterServiceDescriptor(_HUB)
DESCRIPTOR.services_by_name['Hub'] = _HUB
# @@protoc_insertion_point(module_scope)
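
The descriptors above wire up the pb.SearchRequest message whose generated class is registered here. A minimal sketch of populating it, assuming RangeField exposes an Op enum (e.g. GTE) and a repeated integer value field as its descriptor earlier in this file suggests:

from . import hub_pb2  # the generated module shown above

req = hub_pb2.SearchRequest()
req.claim_type.append('stream')             # repeated string, field number 25
req.any_tags.extend(['science', 'nature'])  # repeated string, field number 49
rf = req.release_time.add()                 # repeated RangeField, field number 19
rf.op = hub_pb2.RangeField.GTE              # assumed Op enum value
rf.value.append(1600000000)
wire_bytes = req.SerializeToString()        # bytes handed to the gRPC stub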


@@ -1,298 +0,0 @@
# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT!
"""Client and server classes corresponding to protobuf-defined services."""
import grpc
from . import hub_pb2 as hub__pb2
from . import result_pb2 as result__pb2
class HubStub(object):
"""Missing associated documentation comment in .proto file."""
def __init__(self, channel):
"""Constructor.
Args:
channel: A grpc.Channel.
"""
self.Search = channel.unary_unary(
'/pb.Hub/Search',
request_serializer=hub__pb2.SearchRequest.SerializeToString,
response_deserializer=result__pb2.Outputs.FromString,
)
self.Ping = channel.unary_unary(
'/pb.Hub/Ping',
request_serializer=hub__pb2.EmptyMessage.SerializeToString,
response_deserializer=hub__pb2.StringValue.FromString,
)
self.Hello = channel.unary_unary(
'/pb.Hub/Hello',
request_serializer=hub__pb2.HelloMessage.SerializeToString,
response_deserializer=hub__pb2.HelloMessage.FromString,
)
self.AddPeer = channel.unary_unary(
'/pb.Hub/AddPeer',
request_serializer=hub__pb2.ServerMessage.SerializeToString,
response_deserializer=hub__pb2.StringValue.FromString,
)
self.PeerSubscribe = channel.unary_unary(
'/pb.Hub/PeerSubscribe',
request_serializer=hub__pb2.ServerMessage.SerializeToString,
response_deserializer=hub__pb2.StringValue.FromString,
)
self.Version = channel.unary_unary(
'/pb.Hub/Version',
request_serializer=hub__pb2.EmptyMessage.SerializeToString,
response_deserializer=hub__pb2.StringValue.FromString,
)
self.Features = channel.unary_unary(
'/pb.Hub/Features',
request_serializer=hub__pb2.EmptyMessage.SerializeToString,
response_deserializer=hub__pb2.StringValue.FromString,
)
self.Broadcast = channel.unary_unary(
'/pb.Hub/Broadcast',
request_serializer=hub__pb2.EmptyMessage.SerializeToString,
response_deserializer=hub__pb2.UInt32Value.FromString,
)
class HubServicer(object):
"""Missing associated documentation comment in .proto file."""
def Search(self, request, context):
"""Missing associated documentation comment in .proto file."""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def Ping(self, request, context):
"""Missing associated documentation comment in .proto file."""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def Hello(self, request, context):
"""Missing associated documentation comment in .proto file."""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def AddPeer(self, request, context):
"""Missing associated documentation comment in .proto file."""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def PeerSubscribe(self, request, context):
"""Missing associated documentation comment in .proto file."""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def Version(self, request, context):
"""Missing associated documentation comment in .proto file."""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def Features(self, request, context):
"""Missing associated documentation comment in .proto file."""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def Broadcast(self, request, context):
"""Missing associated documentation comment in .proto file."""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def add_HubServicer_to_server(servicer, server):
rpc_method_handlers = {
'Search': grpc.unary_unary_rpc_method_handler(
servicer.Search,
request_deserializer=hub__pb2.SearchRequest.FromString,
response_serializer=result__pb2.Outputs.SerializeToString,
),
'Ping': grpc.unary_unary_rpc_method_handler(
servicer.Ping,
request_deserializer=hub__pb2.EmptyMessage.FromString,
response_serializer=hub__pb2.StringValue.SerializeToString,
),
'Hello': grpc.unary_unary_rpc_method_handler(
servicer.Hello,
request_deserializer=hub__pb2.HelloMessage.FromString,
response_serializer=hub__pb2.HelloMessage.SerializeToString,
),
'AddPeer': grpc.unary_unary_rpc_method_handler(
servicer.AddPeer,
request_deserializer=hub__pb2.ServerMessage.FromString,
response_serializer=hub__pb2.StringValue.SerializeToString,
),
'PeerSubscribe': grpc.unary_unary_rpc_method_handler(
servicer.PeerSubscribe,
request_deserializer=hub__pb2.ServerMessage.FromString,
response_serializer=hub__pb2.StringValue.SerializeToString,
),
'Version': grpc.unary_unary_rpc_method_handler(
servicer.Version,
request_deserializer=hub__pb2.EmptyMessage.FromString,
response_serializer=hub__pb2.StringValue.SerializeToString,
),
'Features': grpc.unary_unary_rpc_method_handler(
servicer.Features,
request_deserializer=hub__pb2.EmptyMessage.FromString,
response_serializer=hub__pb2.StringValue.SerializeToString,
),
'Broadcast': grpc.unary_unary_rpc_method_handler(
servicer.Broadcast,
request_deserializer=hub__pb2.EmptyMessage.FromString,
response_serializer=hub__pb2.UInt32Value.SerializeToString,
),
}
generic_handler = grpc.method_handlers_generic_handler(
'pb.Hub', rpc_method_handlers)
server.add_generic_rpc_handlers((generic_handler,))
# This class is part of an EXPERIMENTAL API.
class Hub(object):
"""Missing associated documentation comment in .proto file."""
@staticmethod
def Search(request,
target,
options=(),
channel_credentials=None,
call_credentials=None,
insecure=False,
compression=None,
wait_for_ready=None,
timeout=None,
metadata=None):
return grpc.experimental.unary_unary(request, target, '/pb.Hub/Search',
hub__pb2.SearchRequest.SerializeToString,
result__pb2.Outputs.FromString,
options, channel_credentials,
insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
@staticmethod
def Ping(request,
target,
options=(),
channel_credentials=None,
call_credentials=None,
insecure=False,
compression=None,
wait_for_ready=None,
timeout=None,
metadata=None):
return grpc.experimental.unary_unary(request, target, '/pb.Hub/Ping',
hub__pb2.EmptyMessage.SerializeToString,
hub__pb2.StringValue.FromString,
options, channel_credentials,
insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
@staticmethod
def Hello(request,
target,
options=(),
channel_credentials=None,
call_credentials=None,
insecure=False,
compression=None,
wait_for_ready=None,
timeout=None,
metadata=None):
return grpc.experimental.unary_unary(request, target, '/pb.Hub/Hello',
hub__pb2.HelloMessage.SerializeToString,
hub__pb2.HelloMessage.FromString,
options, channel_credentials,
insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
@staticmethod
def AddPeer(request,
target,
options=(),
channel_credentials=None,
call_credentials=None,
insecure=False,
compression=None,
wait_for_ready=None,
timeout=None,
metadata=None):
return grpc.experimental.unary_unary(request, target, '/pb.Hub/AddPeer',
hub__pb2.ServerMessage.SerializeToString,
hub__pb2.StringValue.FromString,
options, channel_credentials,
insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
@staticmethod
def PeerSubscribe(request,
target,
options=(),
channel_credentials=None,
call_credentials=None,
insecure=False,
compression=None,
wait_for_ready=None,
timeout=None,
metadata=None):
return grpc.experimental.unary_unary(request, target, '/pb.Hub/PeerSubscribe',
hub__pb2.ServerMessage.SerializeToString,
hub__pb2.StringValue.FromString,
options, channel_credentials,
insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
@staticmethod
def Version(request,
target,
options=(),
channel_credentials=None,
call_credentials=None,
insecure=False,
compression=None,
wait_for_ready=None,
timeout=None,
metadata=None):
return grpc.experimental.unary_unary(request, target, '/pb.Hub/Version',
hub__pb2.EmptyMessage.SerializeToString,
hub__pb2.StringValue.FromString,
options, channel_credentials,
insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
@staticmethod
def Features(request,
target,
options=(),
channel_credentials=None,
call_credentials=None,
insecure=False,
compression=None,
wait_for_ready=None,
timeout=None,
metadata=None):
return grpc.experimental.unary_unary(request, target, '/pb.Hub/Features',
hub__pb2.EmptyMessage.SerializeToString,
hub__pb2.StringValue.FromString,
options, channel_credentials,
insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
@staticmethod
def Broadcast(request,
target,
options=(),
channel_credentials=None,
call_credentials=None,
insecure=False,
compression=None,
wait_for_ready=None,
timeout=None,
metadata=None):
return grpc.experimental.unary_unary(request, target, '/pb.Hub/Broadcast',
hub__pb2.EmptyMessage.SerializeToString,
hub__pb2.UInt32Value.FromString,
options, channel_credentials,
insecure, call_credentials, compression, wait_for_ready, timeout, metadata)
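
A minimal sketch of driving the generated client stub above, assuming a hub listening on an illustrative address; the method and message names come from the code being deleted:

import grpc
from . import hub_pb2, hub_pb2_grpc

with grpc.insecure_channel('localhost:50051') as channel:  # illustrative address
    stub = hub_pb2_grpc.HubStub(channel)
    version = stub.Version(hub_pb2.EmptyMessage())  # returns a pb.StringValue
    outputs = stub.Search(hub_pb2.SearchRequest(claim_type=['stream']))  # returns pb.Outputs
    print(version.value, outputs.total)  # StringValue.value is assumed from its earlier descriptor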


@@ -1,69 +0,0 @@
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: purchase.proto
import sys
_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
from google.protobuf import descriptor_pb2
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
DESCRIPTOR = _descriptor.FileDescriptor(
name='purchase.proto',
package='pb',
syntax='proto3',
serialized_pb=_b('\n\x0epurchase.proto\x12\x02pb\"\x1e\n\x08Purchase\x12\x12\n\nclaim_hash\x18\x01 \x01(\x0c\x62\x06proto3')
)
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
_PURCHASE = _descriptor.Descriptor(
name='Purchase',
full_name='pb.Purchase',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='claim_hash', full_name='pb.Purchase.claim_hash', index=0,
number=1, type=12, cpp_type=9, label=1,
has_default_value=False, default_value=_b(""),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=22,
serialized_end=52,
)
DESCRIPTOR.message_types_by_name['Purchase'] = _PURCHASE
Purchase = _reflection.GeneratedProtocolMessageType('Purchase', (_message.Message,), dict(
DESCRIPTOR = _PURCHASE,
__module__ = 'purchase_pb2'
# @@protoc_insertion_point(class_scope:pb.Purchase)
))
_sym_db.RegisterMessage(Purchase)
# @@protoc_insertion_point(module_scope)
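
The deleted purchase_pb2 module defines a single-field message; a round-trip sketch:

from . import purchase_pb2

p = purchase_pb2.Purchase(claim_hash=b'\x00' * 20)  # bytes field, number 1
assert purchase_pb2.Purchase.FromString(p.SerializeToString()).claim_hash == p.claim_hash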


@@ -1,464 +0,0 @@
# -*- coding: utf-8 -*-
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: result.proto
"""Generated protocol buffer code."""
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
DESCRIPTOR = _descriptor.FileDescriptor(
name='result.proto',
package='pb',
syntax='proto3',
serialized_options=b'Z$github.com/lbryio/hub/protobuf/go/pb',
create_key=_descriptor._internal_create_key,
serialized_pb=b'\n\x0cresult.proto\x12\x02pb\"\x97\x01\n\x07Outputs\x12\x18\n\x04txos\x18\x01 \x03(\x0b\x32\n.pb.Output\x12\x1e\n\nextra_txos\x18\x02 \x03(\x0b\x32\n.pb.Output\x12\r\n\x05total\x18\x03 \x01(\r\x12\x0e\n\x06offset\x18\x04 \x01(\r\x12\x1c\n\x07\x62locked\x18\x05 \x03(\x0b\x32\x0b.pb.Blocked\x12\x15\n\rblocked_total\x18\x06 \x01(\r\"{\n\x06Output\x12\x0f\n\x07tx_hash\x18\x01 \x01(\x0c\x12\x0c\n\x04nout\x18\x02 \x01(\r\x12\x0e\n\x06height\x18\x03 \x01(\r\x12\x1e\n\x05\x63laim\x18\x07 \x01(\x0b\x32\r.pb.ClaimMetaH\x00\x12\x1a\n\x05\x65rror\x18\x0f \x01(\x0b\x32\t.pb.ErrorH\x00\x42\x06\n\x04meta\"\xe6\x02\n\tClaimMeta\x12\x1b\n\x07\x63hannel\x18\x01 \x01(\x0b\x32\n.pb.Output\x12\x1a\n\x06repost\x18\x02 \x01(\x0b\x32\n.pb.Output\x12\x11\n\tshort_url\x18\x03 \x01(\t\x12\x15\n\rcanonical_url\x18\x04 \x01(\t\x12\x16\n\x0eis_controlling\x18\x05 \x01(\x08\x12\x18\n\x10take_over_height\x18\x06 \x01(\r\x12\x17\n\x0f\x63reation_height\x18\x07 \x01(\r\x12\x19\n\x11\x61\x63tivation_height\x18\x08 \x01(\r\x12\x19\n\x11\x65xpiration_height\x18\t \x01(\r\x12\x19\n\x11\x63laims_in_channel\x18\n \x01(\r\x12\x10\n\x08reposted\x18\x0b \x01(\r\x12\x18\n\x10\x65\x66\x66\x65\x63tive_amount\x18\x14 \x01(\x04\x12\x16\n\x0esupport_amount\x18\x15 \x01(\x04\x12\x16\n\x0etrending_score\x18\x16 \x01(\x01\"\x94\x01\n\x05\x45rror\x12\x1c\n\x04\x63ode\x18\x01 \x01(\x0e\x32\x0e.pb.Error.Code\x12\x0c\n\x04text\x18\x02 \x01(\t\x12\x1c\n\x07\x62locked\x18\x03 \x01(\x0b\x32\x0b.pb.Blocked\"A\n\x04\x43ode\x12\x10\n\x0cUNKNOWN_CODE\x10\x00\x12\r\n\tNOT_FOUND\x10\x01\x12\x0b\n\x07INVALID\x10\x02\x12\x0b\n\x07\x42LOCKED\x10\x03\"5\n\x07\x42locked\x12\r\n\x05\x63ount\x18\x01 \x01(\r\x12\x1b\n\x07\x63hannel\x18\x02 \x01(\x0b\x32\n.pb.OutputB&Z$github.com/lbryio/hub/protobuf/go/pbb\x06proto3'
)
_ERROR_CODE = _descriptor.EnumDescriptor(
name='Code',
full_name='pb.Error.Code',
filename=None,
file=DESCRIPTOR,
create_key=_descriptor._internal_create_key,
values=[
_descriptor.EnumValueDescriptor(
name='UNKNOWN_CODE', index=0, number=0,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='NOT_FOUND', index=1, number=1,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='INVALID', index=2, number=2,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='BLOCKED', index=3, number=3,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
],
containing_type=None,
serialized_options=None,
serialized_start=744,
serialized_end=809,
)
_sym_db.RegisterEnumDescriptor(_ERROR_CODE)
_OUTPUTS = _descriptor.Descriptor(
name='Outputs',
full_name='pb.Outputs',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='txos', full_name='pb.Outputs.txos', index=0,
number=1, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='extra_txos', full_name='pb.Outputs.extra_txos', index=1,
number=2, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='total', full_name='pb.Outputs.total', index=2,
number=3, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='offset', full_name='pb.Outputs.offset', index=3,
number=4, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='blocked', full_name='pb.Outputs.blocked', index=4,
number=5, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='blocked_total', full_name='pb.Outputs.blocked_total', index=5,
number=6, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=21,
serialized_end=172,
)
_OUTPUT = _descriptor.Descriptor(
name='Output',
full_name='pb.Output',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='tx_hash', full_name='pb.Output.tx_hash', index=0,
number=1, type=12, cpp_type=9, label=1,
has_default_value=False, default_value=b"",
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='nout', full_name='pb.Output.nout', index=1,
number=2, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='height', full_name='pb.Output.height', index=2,
number=3, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='claim', full_name='pb.Output.claim', index=3,
number=7, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='error', full_name='pb.Output.error', index=4,
number=15, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
_descriptor.OneofDescriptor(
name='meta', full_name='pb.Output.meta',
index=0, containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[]),
],
serialized_start=174,
serialized_end=297,
)
_CLAIMMETA = _descriptor.Descriptor(
name='ClaimMeta',
full_name='pb.ClaimMeta',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='channel', full_name='pb.ClaimMeta.channel', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='repost', full_name='pb.ClaimMeta.repost', index=1,
number=2, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='short_url', full_name='pb.ClaimMeta.short_url', index=2,
number=3, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='canonical_url', full_name='pb.ClaimMeta.canonical_url', index=3,
number=4, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='is_controlling', full_name='pb.ClaimMeta.is_controlling', index=4,
number=5, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='take_over_height', full_name='pb.ClaimMeta.take_over_height', index=5,
number=6, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='creation_height', full_name='pb.ClaimMeta.creation_height', index=6,
number=7, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='activation_height', full_name='pb.ClaimMeta.activation_height', index=7,
number=8, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='expiration_height', full_name='pb.ClaimMeta.expiration_height', index=8,
number=9, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='claims_in_channel', full_name='pb.ClaimMeta.claims_in_channel', index=9,
number=10, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='reposted', full_name='pb.ClaimMeta.reposted', index=10,
number=11, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='effective_amount', full_name='pb.ClaimMeta.effective_amount', index=11,
number=20, type=4, cpp_type=4, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='support_amount', full_name='pb.ClaimMeta.support_amount', index=12,
number=21, type=4, cpp_type=4, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='trending_score', full_name='pb.ClaimMeta.trending_score', index=13,
number=22, type=1, cpp_type=5, label=1,
has_default_value=False, default_value=float(0),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=300,
serialized_end=658,
)
_ERROR = _descriptor.Descriptor(
name='Error',
full_name='pb.Error',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='code', full_name='pb.Error.code', index=0,
number=1, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='text', full_name='pb.Error.text', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='blocked', full_name='pb.Error.blocked', index=2,
number=3, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
_ERROR_CODE,
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=661,
serialized_end=809,
)
_BLOCKED = _descriptor.Descriptor(
name='Blocked',
full_name='pb.Blocked',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='count', full_name='pb.Blocked.count', index=0,
number=1, type=13, cpp_type=3, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='channel', full_name='pb.Blocked.channel', index=1,
number=2, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=811,
serialized_end=864,
)
_OUTPUTS.fields_by_name['txos'].message_type = _OUTPUT
_OUTPUTS.fields_by_name['extra_txos'].message_type = _OUTPUT
_OUTPUTS.fields_by_name['blocked'].message_type = _BLOCKED
_OUTPUT.fields_by_name['claim'].message_type = _CLAIMMETA
_OUTPUT.fields_by_name['error'].message_type = _ERROR
_OUTPUT.oneofs_by_name['meta'].fields.append(
_OUTPUT.fields_by_name['claim'])
_OUTPUT.fields_by_name['claim'].containing_oneof = _OUTPUT.oneofs_by_name['meta']
_OUTPUT.oneofs_by_name['meta'].fields.append(
_OUTPUT.fields_by_name['error'])
_OUTPUT.fields_by_name['error'].containing_oneof = _OUTPUT.oneofs_by_name['meta']
_CLAIMMETA.fields_by_name['channel'].message_type = _OUTPUT
_CLAIMMETA.fields_by_name['repost'].message_type = _OUTPUT
_ERROR.fields_by_name['code'].enum_type = _ERROR_CODE
_ERROR.fields_by_name['blocked'].message_type = _BLOCKED
_ERROR_CODE.containing_type = _ERROR
_BLOCKED.fields_by_name['channel'].message_type = _OUTPUT
DESCRIPTOR.message_types_by_name['Outputs'] = _OUTPUTS
DESCRIPTOR.message_types_by_name['Output'] = _OUTPUT
DESCRIPTOR.message_types_by_name['ClaimMeta'] = _CLAIMMETA
DESCRIPTOR.message_types_by_name['Error'] = _ERROR
DESCRIPTOR.message_types_by_name['Blocked'] = _BLOCKED
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
Outputs = _reflection.GeneratedProtocolMessageType('Outputs', (_message.Message,), {
'DESCRIPTOR' : _OUTPUTS,
'__module__' : 'result_pb2'
# @@protoc_insertion_point(class_scope:pb.Outputs)
})
_sym_db.RegisterMessage(Outputs)
Output = _reflection.GeneratedProtocolMessageType('Output', (_message.Message,), {
'DESCRIPTOR' : _OUTPUT,
'__module__' : 'result_pb2'
# @@protoc_insertion_point(class_scope:pb.Output)
})
_sym_db.RegisterMessage(Output)
ClaimMeta = _reflection.GeneratedProtocolMessageType('ClaimMeta', (_message.Message,), {
'DESCRIPTOR' : _CLAIMMETA,
'__module__' : 'result_pb2'
# @@protoc_insertion_point(class_scope:pb.ClaimMeta)
})
_sym_db.RegisterMessage(ClaimMeta)
Error = _reflection.GeneratedProtocolMessageType('Error', (_message.Message,), {
'DESCRIPTOR' : _ERROR,
'__module__' : 'result_pb2'
# @@protoc_insertion_point(class_scope:pb.Error)
})
_sym_db.RegisterMessage(Error)
Blocked = _reflection.GeneratedProtocolMessageType('Blocked', (_message.Message,), {
'DESCRIPTOR' : _BLOCKED,
'__module__' : 'result_pb2'
# @@protoc_insertion_point(class_scope:pb.Blocked)
})
_sym_db.RegisterMessage(Blocked)
DESCRIPTOR._options = None
# @@protoc_insertion_point(module_scope)
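
The Output message above places claim and error inside a 'meta' oneof, so setting one branch clears the other; a short sketch of that behavior:

from . import result_pb2

out = result_pb2.Output(tx_hash=b'\xab' * 32, nout=0, height=100)
out.claim.short_url = 'lbry://a#1'           # selects the 'claim' branch
assert out.WhichOneof('meta') == 'claim'
out.error.code = result_pb2.Error.NOT_FOUND  # selecting 'error' clears 'claim'
assert out.WhichOneof('meta') == 'error'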


@@ -1,4 +0,0 @@
# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT!
"""Client and server classes corresponding to protobuf-defined services."""
import grpc


@@ -1,76 +0,0 @@
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: support.proto
import sys
_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
from google.protobuf import descriptor_pb2
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
DESCRIPTOR = _descriptor.FileDescriptor(
name='support.proto',
package='pb',
syntax='proto3',
serialized_pb=_b('\n\rsupport.proto\x12\x02pb\")\n\x07Support\x12\r\n\x05\x65moji\x18\x01 \x01(\t\x12\x0f\n\x07\x63omment\x18\x02 \x01(\tb\x06proto3')
)
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
_SUPPORT = _descriptor.Descriptor(
name='Support',
full_name='pb.Support',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='emoji', full_name='pb.Support.emoji', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='comment', full_name='pb.Support.comment', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=21,
serialized_end=62,
)
DESCRIPTOR.message_types_by_name['Support'] = _SUPPORT
Support = _reflection.GeneratedProtocolMessageType('Support', (_message.Message,), dict(
DESCRIPTOR = _SUPPORT,
__module__ = 'support_pb2'
# @@protoc_insertion_point(class_scope:pb.Support)
))
_sym_db.RegisterMessage(Support)
# @@protoc_insertion_point(module_scope)
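
Another small standalone message; a round-trip sketch:

from . import support_pb2

s = support_pb2.Support(emoji='🎉', comment='great stream')
assert support_pb2.Support.FromString(s.SerializeToString()).comment == 'great stream'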


@@ -1,130 +0,0 @@
import re
import unicodedata
from typing import NamedTuple, Tuple
def _create_url_regex():
# see https://spec.lbry.com/ and test_url.py
invalid_names_regex = \
r"[^=&#:$@%?;\"/\\<>%{}|^~`\[\]" \
r"\u0000-\u0020\uD800-\uDFFF\uFFFE-\uFFFF]+"
def _named(name, regex):
return "(?P<" + name + ">" + regex + ")"
def _group(regex):
return "(?:" + regex + ")"
def _oneof(*choices):
return _group('|'.join(choices))
def _claim(name, prefix=""):
return _group(
_named(name+"_name", prefix + invalid_names_regex) +
_oneof(
_group('[:#]' + _named(name+"_claim_id", "[0-9a-f]{1,40}")),
_group(r'\$' + _named(name+"_amount_order", '[1-9][0-9]*'))
) + '?'
)
return (
'^' +
_named("scheme", "lbry://") + '?' +
_oneof(
_group(_claim("channel_with_stream", "@") + "/" + _claim("stream_in_channel")),
_claim("channel", "@"),
_claim("stream")
) +
'$'
)
URL_REGEX = _create_url_regex()
def normalize_name(name):
return unicodedata.normalize('NFD', name).casefold()
class PathSegment(NamedTuple):
name: str
claim_id: str = None
amount_order: int = None
@property
def normalized(self):
return normalize_name(self.name)
@property
def is_shortid(self):
return self.claim_id is not None and len(self.claim_id) < 40
@property
def is_fullid(self):
return self.claim_id is not None and len(self.claim_id) == 40
def to_dict(self):
q = {'name': self.name}
if self.claim_id is not None:
q['claim_id'] = self.claim_id
if self.amount_order is not None:
q['amount_order'] = self.amount_order
return q
def __str__(self):
if self.claim_id is not None:
return f"{self.name}:{self.claim_id}"
elif self.amount_order is not None:
return f"{self.name}${self.amount_order}"
return self.name
class URL(NamedTuple):
stream: PathSegment
channel: PathSegment
@property
def has_channel(self):
return self.channel is not None
@property
def has_stream(self):
return self.stream is not None
@property
def has_stream_in_channel(self):
return self.has_channel and self.has_stream
@property
def parts(self) -> Tuple:
if self.has_stream_in_channel:
return self.channel, self.stream
if self.has_channel:
return self.channel,
return self.stream,
def __str__(self):
return f"lbry://{'/'.join(str(p) for p in self.parts)}"
@classmethod
def parse(cls, url):
match = re.match(URL_REGEX, url)
if match is None:
raise ValueError('Invalid LBRY URL')
segments = {}
parts = match.groupdict()
for segment in ('channel', 'stream', 'channel_with_stream', 'stream_in_channel'):
if parts[f'{segment}_name'] is not None:
segments[segment] = PathSegment(
parts[f'{segment}_name'],
parts[f'{segment}_claim_id'],
parts[f'{segment}_amount_order']
)
if 'channel_with_stream' in segments:
segments['channel'] = segments['channel_with_stream']
segments['stream'] = segments['stream_in_channel']
return cls(segments.get('stream', None), segments.get('channel', None))


@@ -6,7 +6,7 @@ import logging
 from typing import Optional
 from aiohttp.web import Request, StreamResponse, HTTPRequestRangeNotSatisfiable
 from lbry.error import DownloadSDTimeoutError
-from lbry.schema.mime_types import guess_media_type
+from scribe.schema.mime_types import guess_media_type
 from lbry.stream.downloader import StreamDownloader
 from lbry.stream.descriptor import StreamDescriptor, sanitize_file_name
 from lbry.stream.reflector.client import StreamReflectorClient
@@ -16,7 +16,7 @@ from lbry.file.source import ManagedDownloadSource
 if typing.TYPE_CHECKING:
     from lbry.conf import Config
-    from lbry.schema.claim import Claim
+    from scribe.schema.claim import Claim
     from lbry.blob.blob_manager import BlobManager
     from lbry.blob.blob_info import BlobInfo
     from lbry.dht.node import Node


@ -19,8 +19,8 @@ from lbry.conf import Config
from lbry.wallet.util import satoshis_to_coins from lbry.wallet.util import satoshis_to_coins
from lbry.wallet.dewies import lbc_to_dewies from lbry.wallet.dewies import lbc_to_dewies
from lbry.wallet.orchstr8 import Conductor from lbry.wallet.orchstr8 import Conductor
from lbry.wallet.orchstr8.node import BlockchainNode, WalletNode, HubNode from lbry.wallet.orchstr8.node import LBCWalletNode, WalletNode, HubNode
from lbry.schema.claim import Claim from scribe.schema.claim import Claim
from lbry.extras.daemon.daemon import Daemon, jsonrpc_dumps_pretty from lbry.extras.daemon.daemon import Daemon, jsonrpc_dumps_pretty
from lbry.extras.daemon.components import Component, WalletComponent from lbry.extras.daemon.components import Component, WalletComponent
@ -230,7 +230,7 @@ class IntegrationTestCase(AsyncioTestCase):
def __init__(self, *args, **kwargs): def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs) super().__init__(*args, **kwargs)
self.conductor: Optional[Conductor] = None self.conductor: Optional[Conductor] = None
self.blockchain: Optional[BlockchainNode] = None self.blockchain: Optional[LBCWalletNode] = None
self.hub: Optional[HubNode] = None self.hub: Optional[HubNode] = None
self.wallet_node: Optional[WalletNode] = None self.wallet_node: Optional[WalletNode] = None
self.manager: Optional[WalletManager] = None self.manager: Optional[WalletManager] = None
@ -240,15 +240,17 @@ class IntegrationTestCase(AsyncioTestCase):
async def asyncSetUp(self): async def asyncSetUp(self):
self.conductor = Conductor(seed=self.SEED) self.conductor = Conductor(seed=self.SEED)
await self.conductor.start_blockchain() await self.conductor.start_lbcd()
self.addCleanup(self.conductor.stop_blockchain) self.addCleanup(self.conductor.stop_lbcd)
await self.conductor.start_lbcwallet()
self.addCleanup(self.conductor.stop_lbcwallet)
await self.conductor.start_spv() await self.conductor.start_spv()
self.addCleanup(self.conductor.stop_spv) self.addCleanup(self.conductor.stop_spv)
await self.conductor.start_wallet() await self.conductor.start_wallet()
self.addCleanup(self.conductor.stop_wallet) self.addCleanup(self.conductor.stop_wallet)
await self.conductor.start_hub() await self.conductor.start_hub()
self.addCleanup(self.conductor.stop_hub) self.addCleanup(self.conductor.stop_hub)
self.blockchain = self.conductor.blockchain_node self.blockchain = self.conductor.lbcwallet_node
self.hub = self.conductor.hub_node self.hub = self.conductor.hub_node
self.wallet_node = self.conductor.wallet_node self.wallet_node = self.conductor.wallet_node
self.manager = self.wallet_node.manager self.manager = self.wallet_node.manager
@ -263,6 +265,13 @@ class IntegrationTestCase(AsyncioTestCase):
def broadcast(self, tx): def broadcast(self, tx):
return self.ledger.broadcast(tx) return self.ledger.broadcast(tx)
async def broadcast_and_confirm(self, tx, ledger=None):
ledger = ledger or self.ledger
notifications = asyncio.create_task(ledger.wait(tx))
await ledger.broadcast(tx)
await notifications
await self.generate_and_wait(1, [tx.id], ledger)
async def on_header(self, height): async def on_header(self, height):
if self.ledger.headers.height < height: if self.ledger.headers.height < height:
await self.ledger.on_header.where( await self.ledger.on_header.where(
@@ -270,11 +279,36 @@ class IntegrationTestCase(AsyncioTestCase):
) )
return True return True
def on_transaction_id(self, txid, ledger=None): async def send_to_address_and_wait(self, address, amount, blocks_to_generate=0, ledger=None):
return (ledger or self.ledger).on_transaction.where( tx_watch = []
lambda e: e.tx.id == txid txid = None
done = False
watcher = (ledger or self.ledger).on_transaction.where(
lambda e: e.tx.id == txid or done or tx_watch.append(e.tx.id)
) )
txid = await self.blockchain.send_to_address(address, amount)
done = txid in tx_watch
await watcher
await self.generate_and_wait(blocks_to_generate, [txid], ledger)
return txid
async def generate_and_wait(self, blocks_to_generate, txids, ledger=None):
if blocks_to_generate > 0:
watcher = (ledger or self.ledger).on_transaction.where(
lambda e: ((e.tx.id in txids and txids.remove(e.tx.id)), len(txids) <= 0)[-1]  # tuple lambda: drop each seen txid from txids, return True once all have been seen
)
self.conductor.spv_node.server.synchronized.clear()
await self.blockchain.generate(blocks_to_generate)
height = self.blockchain.block_expected
await watcher
while True:
await self.conductor.spv_node.server.synchronized.wait()
self.conductor.spv_node.server.synchronized.clear()
if self.conductor.spv_node.server.db.db_height >= height:
break
def on_address_update(self, address): def on_address_update(self, address):
return self.ledger.on_transaction.where( return self.ledger.on_transaction.where(
lambda e: e.address == address lambda e: e.address == address
@@ -285,6 +319,19 @@ class IntegrationTestCase(AsyncioTestCase):
lambda e: e.tx.id == tx.id and e.address == address lambda e: e.tx.id == tx.id and e.address == address
) )
async def generate(self, blocks):
""" Ask lbrycrd to generate some blocks and wait until ledger has them. """
prepare = self.ledger.on_header.where(self.blockchain.is_expected_block)
height = self.blockchain.block_expected
self.conductor.spv_node.server.synchronized.clear()
await self.blockchain.generate(blocks)
await prepare # no guarantee that it didn't happen already, so start waiting from before calling generate
while True:
await self.conductor.spv_node.server.synchronized.wait()
self.conductor.spv_node.server.synchronized.clear()
if self.conductor.spv_node.server.db.db_height >= height:
break
class FakeExchangeRateManager(ExchangeRateManager): class FakeExchangeRateManager(ExchangeRateManager):
@@ -345,20 +392,19 @@ class CommandTestCase(IntegrationTestCase):
self.skip_libtorrent = True self.skip_libtorrent = True
async def asyncSetUp(self): async def asyncSetUp(self):
await super().asyncSetUp()
logging.getLogger('lbry.blob_exchange').setLevel(self.VERBOSITY) logging.getLogger('lbry.blob_exchange').setLevel(self.VERBOSITY)
logging.getLogger('lbry.daemon').setLevel(self.VERBOSITY) logging.getLogger('lbry.daemon').setLevel(self.VERBOSITY)
logging.getLogger('lbry.stream').setLevel(self.VERBOSITY) logging.getLogger('lbry.stream').setLevel(self.VERBOSITY)
logging.getLogger('lbry.wallet').setLevel(self.VERBOSITY) logging.getLogger('lbry.wallet').setLevel(self.VERBOSITY)
await super().asyncSetUp()
self.daemon = await self.add_daemon(self.wallet_node) self.daemon = await self.add_daemon(self.wallet_node)
await self.account.ensure_address_gap() await self.account.ensure_address_gap()
address = (await self.account.receiving.get_addresses(limit=1, only_usable=True))[0] address = (await self.account.receiving.get_addresses(limit=1, only_usable=True))[0]
sendtxid = await self.blockchain.send_to_address(address, 10) await self.send_to_address_and_wait(address, 10, 6)
await self.confirm_tx(sendtxid)
await self.generate(5)
server_tmp_dir = tempfile.mkdtemp() server_tmp_dir = tempfile.mkdtemp()
self.addCleanup(shutil.rmtree, server_tmp_dir) self.addCleanup(shutil.rmtree, server_tmp_dir)
@@ -455,9 +501,14 @@ class CommandTestCase(IntegrationTestCase):
async def confirm_tx(self, txid, ledger=None): async def confirm_tx(self, txid, ledger=None):
""" Wait for tx to be in mempool, then generate a block, wait for tx to be in a block. """ """ Wait for tx to be in mempool, then generate a block, wait for tx to be in a block. """
await self.on_transaction_id(txid, ledger) # await (ledger or self.ledger).on_transaction.where(lambda e: e.tx.id == txid)
await self.generate(1) on_tx = (ledger or self.ledger).on_transaction.where(lambda e: e.tx.id == txid)
await self.on_transaction_id(txid, ledger) await asyncio.wait([self.generate(1), on_tx], timeout=5)
# # actually, if it's in the mempool or in the block we're fine
# await self.generate_and_wait(1, [txid], ledger=ledger)
# return txid
return txid return txid
async def on_transaction_dict(self, tx): async def on_transaction_dict(self, tx):
@@ -472,12 +523,6 @@ class CommandTestCase(IntegrationTestCase):
addresses.add(txo['address']) addresses.add(txo['address'])
return list(addresses) return list(addresses)
async def generate(self, blocks):
""" Ask lbrycrd to generate some blocks and wait until ledger has them. """
prepare = self.ledger.on_header.where(self.blockchain.is_expected_block)
await self.blockchain.generate(blocks)
await prepare # no guarantee that it didn't happen already, so start waiting from before calling generate
async def blockchain_claim_name(self, name: str, value: str, amount: str, confirm=True): async def blockchain_claim_name(self, name: str, value: str, amount: str, confirm=True):
txid = await self.blockchain._cli_cmnd('claimname', name, value, amount) txid = await self.blockchain._cli_cmnd('claimname', name, value, amount)
if confirm: if confirm:
@@ -508,7 +553,7 @@ class CommandTestCase(IntegrationTestCase):
return self.sout(tx) return self.sout(tx)
return tx return tx
async def create_nondeterministic_channel(self, name, price, pubkey_bytes, daemon=None): async def create_nondeterministic_channel(self, name, price, pubkey_bytes, daemon=None, blocking=False):
account = (daemon or self.daemon).wallet_manager.default_account account = (daemon or self.daemon).wallet_manager.default_account
claim_address = await account.receiving.get_or_create_usable_address() claim_address = await account.receiving.get_or_create_usable_address()
claim = Claim() claim = Claim()
@@ -518,7 +563,7 @@ class CommandTestCase(IntegrationTestCase):
claim_address, [self.account], self.account claim_address, [self.account], self.account
) )
await tx.sign([self.account]) await tx.sign([self.account])
await (daemon or self.daemon).broadcast_or_release(tx, False) await (daemon or self.daemon).broadcast_or_release(tx, blocking)
return self.sout(tx) return self.sout(tx)
def create_upload_file(self, data, prefix=None, suffix=None): def create_upload_file(self, data, prefix=None, suffix=None):
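
The testcase changes above replace the old send/confirm/generate dance with helpers that also wait for the SPV node's `synchronized` event and for its `db_height` to catch up. A hedged usage sketch, assuming the usual `lbry.testcase` import path and a funded regtest account:

from lbry.testcase import CommandTestCase

class ExampleTests(CommandTestCase):  # hypothetical test class
    async def test_fund_address(self):
        address = await self.account.receiving.get_or_create_usable_address()
        # waits for the mempool notification, mines one block, then waits for
        # both the wallet and the SPV server to reach the new height
        txid = await self.send_to_address_and_wait(address, 1, blocks_to_generate=1)
        self.assertIsNotNone(txid)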


@@ -21,7 +21,7 @@ import pkg_resources
import certifi import certifi
import aiohttp import aiohttp
from prometheus_client import Counter from prometheus_client import Counter
from lbry.schema.claim import Claim from scribe.schema.claim import Claim
log = logging.getLogger(__name__) log = logging.getLogger(__name__)
@@ -405,7 +405,7 @@ async def fallback_get_external_ip():  # used if spv servers can't be used for i
async def _get_external_ip(default_servers) -> typing.Tuple[typing.Optional[str], typing.Optional[str]]: async def _get_external_ip(default_servers) -> typing.Tuple[typing.Optional[str], typing.Optional[str]]:
# used if upnp is disabled or non-functioning # used if upnp is disabled or non-functioning
from lbry.wallet.server.udp import SPVStatusClientProtocol # pylint: disable=C0415 from lbry.wallet.udp import SPVStatusClientProtocol # pylint: disable=C0415
hostname_to_ip = {} hostname_to_ip = {}
ip_to_hostnames = collections.defaultdict(list) ip_to_hostnames = collections.defaultdict(list)


@@ -1,17 +1,23 @@
__node_daemon__ = 'lbrycrdd' __lbcd__ = 'lbcd'
__node_cli__ = 'lbrycrd-cli' __lbcctl__ = 'lbcctl'
__node_bin__ = '' __lbcwallet__ = 'lbcwallet'
__node_url__ = ( __lbcd_url__ = (
'https://github.com/lbryio/lbrycrd/releases/download/v0.17.4.6/lbrycrd-linux-1746.zip' 'https://github.com/lbryio/lbcd/releases/download/' +
'v0.22.200-beta/lbcd_0.22.200-beta_TARGET_PLATFORM.tar.gz'
)
__lbcwallet_url__ = (
'https://github.com/lbryio/lbcwallet/releases/download/' +
'v0.13.100-alpha-rc2/lbcwallet_0.13.100-alpha-rc2_TARGET_PLATFORM.tar.gz'
) )
__spvserver__ = 'lbry.wallet.server.coin.LBCRegTest' __spvserver__ = 'lbry.wallet.server.coin.LBCRegTest'
from .wallet import Wallet, WalletStorage, TimestampedPreferences, ENCRYPT_ON_DISK from lbry.wallet.wallet import Wallet, WalletStorage, TimestampedPreferences, ENCRYPT_ON_DISK
from .manager import WalletManager from lbry.wallet.manager import WalletManager
from .network import Network from lbry.wallet.network import Network
from .ledger import Ledger, RegTestLedger, TestNetLedger, BlockHeightEvent from lbry.wallet.ledger import Ledger, RegTestLedger, TestNetLedger, BlockHeightEvent
from .account import Account, AddressManager, SingleKey, HierarchicalDeterministic, DeterministicChannelKeyManager from lbry.wallet.account import Account, AddressManager, SingleKey, HierarchicalDeterministic, \
from .transaction import Transaction, Output, Input DeterministicChannelKeyManager
from .script import OutputScript, InputScript from lbry.wallet.transaction import Transaction, Output, Input
from .database import SQLiteMixin, Database from lbry.wallet.script import OutputScript, InputScript
from .header import Headers from lbry.wallet.database import SQLiteMixin, Database
from lbry.wallet.header import Headers
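
Both release URLs above carry a TARGET_PLATFORM placeholder that the download() helpers in orchstr8/node.py (further below) fill in from platform.uname(). A standalone sketch of that substitution:

import platform

def expand_target_platform(url: str) -> str:
    # mirrors LBCDNode.download() / LBCWalletNode.download():
    # TARGET_PLATFORM becomes e.g. 'linux_amd64' or 'darwin_arm64'
    uname = platform.uname()
    target_os = uname.system.lower()
    target_arch = uname.machine.replace('x86_64', 'amd64')
    return url.replace('TARGET_PLATFORM', target_os + '_' + target_arch)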


@@ -12,7 +12,7 @@ from typing import Type, Dict, Tuple, Optional, Any, List
from lbry.error import InvalidPasswordError from lbry.error import InvalidPasswordError
from lbry.crypto.crypt import aes_encrypt, aes_decrypt from lbry.crypto.crypt import aes_encrypt, aes_decrypt
from .bip32 import PrivateKey, PublicKey, KeyPath, from_extended_key_string from scribe.schema.bip32 import PrivateKey, PublicKey, KeyPath, from_extended_key_string
from .mnemonic import Mnemonic from .mnemonic import Mnemonic
from .constants import COIN, TXO_TYPES from .constants import COIN, TXO_TYPES
from .transaction import Transaction, Input, Output from .transaction import Transaction, Input, Output


@@ -1,338 +0,0 @@
from asn1crypto.keys import PrivateKeyInfo, ECPrivateKey
from coincurve import PublicKey as cPublicKey, PrivateKey as cPrivateKey
from coincurve.utils import (
pem_to_der, lib as libsecp256k1, ffi as libsecp256k1_ffi
)
from coincurve.ecdsa import CDATA_SIG_LENGTH
from lbry.crypto.hash import hmac_sha512, hash160, double_sha256
from lbry.crypto.base58 import Base58
from .util import cachedproperty
class KeyPath:
RECEIVE = 0
CHANGE = 1
CHANNEL = 2
class DerivationError(Exception):
""" Raised when an invalid derivation occurs. """
class _KeyBase:
""" A BIP32 Key, public or private. """
def __init__(self, ledger, chain_code, n, depth, parent):
if not isinstance(chain_code, (bytes, bytearray)):
raise TypeError('chain code must be raw bytes')
if len(chain_code) != 32:
raise ValueError('invalid chain code')
if not 0 <= n < 1 << 32:
raise ValueError('invalid child number')
if not 0 <= depth < 256:
raise ValueError('invalid depth')
if parent is not None:
if not isinstance(parent, type(self)):
raise TypeError('parent key has bad type')
self.ledger = ledger
self.chain_code = chain_code
self.n = n
self.depth = depth
self.parent = parent
def _hmac_sha512(self, msg):
""" Use SHA-512 to provide an HMAC, returned as a pair of 32-byte objects. """
hmac = hmac_sha512(self.chain_code, msg)
return hmac[:32], hmac[32:]
def _extended_key(self, ver_bytes, raw_serkey):
""" Return the 78-byte extended key given prefix version bytes and serialized key bytes. """
if not isinstance(ver_bytes, (bytes, bytearray)):
raise TypeError('ver_bytes must be raw bytes')
if len(ver_bytes) != 4:
raise ValueError('ver_bytes must have length 4')
if not isinstance(raw_serkey, (bytes, bytearray)):
raise TypeError('raw_serkey must be raw bytes')
if len(raw_serkey) != 33:
raise ValueError('raw_serkey must have length 33')
return (
ver_bytes + bytes((self.depth,))
+ self.parent_fingerprint() + self.n.to_bytes(4, 'big')
+ self.chain_code + raw_serkey
)
def identifier(self):
raise NotImplementedError
def extended_key(self):
raise NotImplementedError
def fingerprint(self):
""" Return the key's fingerprint as 4 bytes. """
return self.identifier()[:4]
def parent_fingerprint(self):
""" Return the parent key's fingerprint as 4 bytes. """
return self.parent.fingerprint() if self.parent else bytes((0,)*4)
def extended_key_string(self):
""" Return an extended key as a base58 string. """
return Base58.encode_check(self.extended_key())
class PublicKey(_KeyBase):
""" A BIP32 public key. """
def __init__(self, ledger, pubkey, chain_code, n, depth, parent=None):
super().__init__(ledger, chain_code, n, depth, parent)
if isinstance(pubkey, cPublicKey):
self.verifying_key = pubkey
else:
self.verifying_key = self._verifying_key_from_pubkey(pubkey)
@classmethod
def from_compressed(cls, public_key_bytes, ledger=None) -> 'PublicKey':
return cls(ledger, public_key_bytes, bytes((0,)*32), 0, 0)
@classmethod
def _verifying_key_from_pubkey(cls, pubkey):
""" Converts a 33-byte compressed pubkey into an coincurve.PublicKey object. """
if not isinstance(pubkey, (bytes, bytearray)):
raise TypeError('pubkey must be raw bytes')
if len(pubkey) != 33:
raise ValueError('pubkey must be 33 bytes')
if pubkey[0] not in (2, 3):
raise ValueError('invalid pubkey prefix byte')
return cPublicKey(pubkey)
@cachedproperty
def pubkey_bytes(self):
""" Return the compressed public key as 33 bytes. """
return self.verifying_key.format(True)
@cachedproperty
def address(self):
""" The public key as a P2PKH address. """
return self.ledger.public_key_to_address(self.pubkey_bytes)
def ec_point(self):
return self.verifying_key.point()
def child(self, n: int) -> 'PublicKey':
""" Return the derived child extended pubkey at index N. """
if not 0 <= n < (1 << 31):
raise ValueError('invalid BIP32 public key child number')
msg = self.pubkey_bytes + n.to_bytes(4, 'big')
L_b, R_b = self._hmac_sha512(msg) # pylint: disable=invalid-name
derived_key = self.verifying_key.add(L_b)
return PublicKey(self.ledger, derived_key, R_b, n, self.depth + 1, self)
def identifier(self):
""" Return the key's identifier as 20 bytes. """
return hash160(self.pubkey_bytes)
def extended_key(self):
""" Return a raw extended public key. """
return self._extended_key(
self.ledger.extended_public_key_prefix,
self.pubkey_bytes
)
def verify(self, signature, digest) -> bool:
""" Verify that a signature is valid for a 32 byte digest. """
if len(signature) != 64:
raise ValueError('Signature must be 64 bytes long.')
if len(digest) != 32:
raise ValueError('Digest must be 32 bytes long.')
key = self.verifying_key
raw_signature = libsecp256k1_ffi.new('secp256k1_ecdsa_signature *')
parsed = libsecp256k1.secp256k1_ecdsa_signature_parse_compact(
key.context.ctx, raw_signature, signature
)
assert parsed == 1
normalized_signature = libsecp256k1_ffi.new('secp256k1_ecdsa_signature *')
libsecp256k1.secp256k1_ecdsa_signature_normalize(
key.context.ctx, normalized_signature, raw_signature
)
verified = libsecp256k1.secp256k1_ecdsa_verify(
key.context.ctx, normalized_signature, digest, key.public_key
)
return bool(verified)
class PrivateKey(_KeyBase):
"""A BIP32 private key."""
HARDENED = 1 << 31
def __init__(self, ledger, privkey, chain_code, n, depth, parent=None):
super().__init__(ledger, chain_code, n, depth, parent)
if isinstance(privkey, cPrivateKey):
self.signing_key = privkey
else:
self.signing_key = self._signing_key_from_privkey(privkey)
@classmethod
def _signing_key_from_privkey(cls, private_key):
""" Converts a 32-byte private key into an coincurve.PrivateKey object. """
return cPrivateKey.from_int(PrivateKey._private_key_secret_exponent(private_key))
@classmethod
def _private_key_secret_exponent(cls, private_key):
""" Return the private key as a secret exponent if it is a valid private key. """
if not isinstance(private_key, (bytes, bytearray)):
raise TypeError('private key must be raw bytes')
if len(private_key) != 32:
raise ValueError('private key must be 32 bytes')
return int.from_bytes(private_key, 'big')
@classmethod
def from_seed(cls, ledger, seed) -> 'PrivateKey':
# This hard-coded message string seems to be coin-independent...
hmac = hmac_sha512(b'Bitcoin seed', seed)
privkey, chain_code = hmac[:32], hmac[32:]
return cls(ledger, privkey, chain_code, 0, 0)
@classmethod
def from_pem(cls, ledger, pem) -> 'PrivateKey':
der = pem_to_der(pem.encode())
try:
key_int = ECPrivateKey.load(der).native['private_key']
except ValueError:
key_int = PrivateKeyInfo.load(der).native['private_key']['private_key']
private_key = cPrivateKey.from_int(key_int)
return cls(ledger, private_key, bytes((0,)*32), 0, 0)
@cachedproperty
def private_key_bytes(self):
""" Return the serialized private key (no leading zero byte). """
return self.signing_key.secret
@cachedproperty
def public_key(self) -> PublicKey:
""" Return the corresponding extended public key. """
verifying_key = self.signing_key.public_key
parent_pubkey = self.parent.public_key if self.parent else None
return PublicKey(
self.ledger, verifying_key, self.chain_code,
self.n, self.depth, parent_pubkey
)
def ec_point(self):
return self.public_key.ec_point()
def secret_exponent(self):
""" Return the private key as a secret exponent. """
return self.signing_key.to_int()
def wif(self):
""" Return the private key encoded in Wallet Import Format. """
return self.ledger.private_key_to_wif(self.private_key_bytes)
@property
def address(self):
""" The public key as a P2PKH address. """
return self.public_key.address
def child(self, n) -> 'PrivateKey':
""" Return the derived child extended private key at index N."""
if not 0 <= n < (1 << 32):
raise ValueError('invalid BIP32 private key child number')
if n >= self.HARDENED:
serkey = b'\0' + self.private_key_bytes
else:
serkey = self.public_key.pubkey_bytes
msg = serkey + n.to_bytes(4, 'big')
L_b, R_b = self._hmac_sha512(msg) # pylint: disable=invalid-name
derived_key = self.signing_key.add(L_b)
return PrivateKey(self.ledger, derived_key, R_b, n, self.depth + 1, self)
def sign(self, data):
""" Produce a signature for piece of data by double hashing it and signing the hash. """
return self.signing_key.sign(data, hasher=double_sha256)
def sign_compact(self, digest):
""" Produce a compact signature. """
key = self.signing_key
signature = libsecp256k1_ffi.new('secp256k1_ecdsa_signature *')
signed = libsecp256k1.secp256k1_ecdsa_sign(
key.context.ctx, signature, digest, key.secret,
libsecp256k1_ffi.NULL, libsecp256k1_ffi.NULL
)
if not signed:
raise ValueError('The private key was invalid.')
serialized = libsecp256k1_ffi.new('unsigned char[%d]' % CDATA_SIG_LENGTH)
compacted = libsecp256k1.secp256k1_ecdsa_signature_serialize_compact(
key.context.ctx, serialized, signature
)
if compacted != 1:
raise ValueError('The signature could not be compacted.')
return bytes(libsecp256k1_ffi.buffer(serialized, CDATA_SIG_LENGTH))
def identifier(self):
"""Return the key's identifier as 20 bytes."""
return self.public_key.identifier()
def extended_key(self):
"""Return a raw extended private key."""
return self._extended_key(
self.ledger.extended_private_key_prefix,
b'\0' + self.private_key_bytes
)
def to_pem(self):
return self.signing_key.to_pem()
def _from_extended_key(ledger, ekey):
"""Return a PublicKey or PrivateKey from an extended key raw bytes."""
if not isinstance(ekey, (bytes, bytearray)):
raise TypeError('extended key must be raw bytes')
if len(ekey) != 78:
raise ValueError('extended key must have length 78')
depth = ekey[4]
n = int.from_bytes(ekey[9:13], 'big')
chain_code = ekey[13:45]
if ekey[:4] == ledger.extended_public_key_prefix:
pubkey = ekey[45:]
key = PublicKey(ledger, pubkey, chain_code, n, depth)
elif ekey[:4] == ledger.extended_private_key_prefix:
if ekey[45] != 0:
raise ValueError('invalid extended private key prefix byte')
privkey = ekey[46:]
key = PrivateKey(ledger, privkey, chain_code, n, depth)
else:
raise ValueError('version bytes unrecognised')
return key
def from_extended_key_string(ledger, ekey_str):
"""Given an extended key string, such as
xpub6BsnM1W2Y7qLMiuhi7f7dbAwQZ5Cz5gYJCRzTNainXzQXYjFwtuQXHd
3qfi3t3KJtHxshXezfjft93w4UE7BGMtKwhqEHae3ZA7d823DVrL
return a PublicKey or PrivateKey.
"""
return _from_extended_key(ledger, Base58.decode_check(ekey_str))
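
The bip32 module deleted above now ships with scribe, and the hunks that follow re-point their imports accordingly. A hedged round-trip sketch, assuming the relocated module keeps this exact API and that a `ledger` and a 64-byte `seed` are available:

from scribe.schema.bip32 import PrivateKey, from_extended_key_string

def derive_receive_key(ledger, seed: bytes):
    master = PrivateKey.from_seed(ledger, seed)    # BIP32 master key from a seed
    xprv = master.extended_key_string()            # base58check-encoded extended key
    assert from_extended_key_string(ledger, xprv).address == master.address
    return master.child(0)                         # non-hardened child at index 0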


@@ -13,7 +13,7 @@ from datetime import date
from prometheus_client import Gauge, Counter, Histogram from prometheus_client import Gauge, Counter, Histogram
from lbry.utils import LockWithMetrics from lbry.utils import LockWithMetrics
from .bip32 import PublicKey from scribe.schema.bip32 import PublicKey
from .transaction import Transaction, Output, OutputScript, TXRefImmutable, Input from .transaction import Transaction, Output, OutputScript, TXRefImmutable, Input
from .constants import TXO_TYPES, CLAIM_TYPES from .constants import TXO_TYPES, CLAIM_TYPES
from .util import date_to_julian_day from .util import date_to_julian_day


@@ -10,24 +10,24 @@ from collections import defaultdict
from binascii import hexlify, unhexlify from binascii import hexlify, unhexlify
from typing import Dict, Tuple, Type, Iterable, List, Optional, DefaultDict, NamedTuple from typing import Dict, Tuple, Type, Iterable, List, Optional, DefaultDict, NamedTuple
from lbry.schema.result import Outputs, INVALID, NOT_FOUND from scribe.schema.result import Outputs, INVALID, NOT_FOUND
from lbry.schema.url import URL from scribe.schema.url import URL
from lbry.crypto.hash import hash160, double_sha256, sha256 from lbry.crypto.hash import hash160, double_sha256, sha256
from lbry.crypto.base58 import Base58 from lbry.crypto.base58 import Base58
from lbry.utils import LRUCacheWithMetrics from lbry.utils import LRUCacheWithMetrics
from .tasks import TaskGroup from lbry.wallet.tasks import TaskGroup
from .database import Database from lbry.wallet.database import Database
from .stream import StreamController from lbry.wallet.stream import StreamController
from .dewies import dewies_to_lbc from lbry.wallet.dewies import dewies_to_lbc
from .account import Account, AddressManager, SingleKey from lbry.wallet.account import Account, AddressManager, SingleKey
from .network import Network from lbry.wallet.network import Network
from .transaction import Transaction, Output from lbry.wallet.transaction import Transaction, Output
from .header import Headers, UnvalidatedHeaders from lbry.wallet.header import Headers, UnvalidatedHeaders
from .checkpoints import HASHES from lbry.wallet.checkpoints import HASHES
from .constants import TXO_TYPES, CLAIM_TYPES, COIN, NULL_HASH32 from lbry.wallet.constants import TXO_TYPES, CLAIM_TYPES, COIN, NULL_HASH32
from .bip32 import PublicKey, PrivateKey from scribe.schema.bip32 import PublicKey, PrivateKey
from .coinselection import CoinSelector from lbry.wallet.coinselection import CoinSelector
log = logging.getLogger(__name__) log = logging.getLogger(__name__)
@@ -365,6 +365,10 @@ class Ledger(metaclass=LedgerRegistry):
await self.db.close() await self.db.close()
await self.headers.close() await self.headers.close()
async def tasks_are_done(self):
await self._update_tasks.done.wait()
await self._other_tasks.done.wait()
@property @property
def local_height_including_downloaded_height(self): def local_height_including_downloaded_height(self):
return max(self.headers.height, self._download_height) return max(self.headers.height, self._download_height)
@@ -739,7 +743,7 @@ class Ledger(metaclass=LedgerRegistry):
while timeout and (int(time.perf_counter()) - start) <= timeout: while timeout and (int(time.perf_counter()) - start) <= timeout:
if await self._wait_round(tx, height, addresses): if await self._wait_round(tx, height, addresses):
return return
raise asyncio.TimeoutError('Timed out waiting for transaction.') raise asyncio.TimeoutError(f'Timed out waiting for transaction. {tx.id}')
async def _wait_round(self, tx: Transaction, height: int, addresses: Iterable[str]): async def _wait_round(self, tx: Transaction, height: int, addresses: Iterable[str]):
records = await self.db.get_addresses(address__in=addresses) records = await self.db.get_addresses(address__in=addresses)
@@ -782,7 +786,7 @@ class Ledger(metaclass=LedgerRegistry):
if hub_server: if hub_server:
outputs = Outputs.from_grpc(encoded_outputs) outputs = Outputs.from_grpc(encoded_outputs)
else: else:
outputs = Outputs.from_base64(encoded_outputs or b'') # TODO: why is the server returning None? outputs = Outputs.from_base64(encoded_outputs or '') # TODO: why is the server returning None?
txs: List[Transaction] = [] txs: List[Transaction] = []
if len(outputs.txs) > 0: if len(outputs.txs) > 0:
async for tx in self.request_transactions(tuple(outputs.txs), cached=True): async for tx in self.request_transactions(tuple(outputs.txs), cached=True):
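
`tasks_are_done()` gives tests a hook to block until the ledger's `_update_tasks` and `_other_tasks` groups drain, and the timeout error now names the offending txid. A minimal teardown sketch using the new hook:

import asyncio

async def drain_ledger(ledger, timeout=30):
    # fails loudly instead of hanging if an address update task is stuck
    await asyncio.wait_for(ledger.tasks_are_done(), timeout)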


@@ -12,13 +12,13 @@ from typing import List, Type, MutableSequence, MutableMapping, Optional
from lbry.error import KeyFeeAboveMaxAllowedError, WalletNotLoadedError from lbry.error import KeyFeeAboveMaxAllowedError, WalletNotLoadedError
from lbry.conf import Config, NOT_SET from lbry.conf import Config, NOT_SET
from .dewies import dewies_to_lbc from lbry.wallet.dewies import dewies_to_lbc
from .account import Account from lbry.wallet.account import Account
from .ledger import Ledger, LedgerRegistry from lbry.wallet.ledger import Ledger, LedgerRegistry
from .transaction import Transaction, Output from lbry.wallet.transaction import Transaction, Output
from .database import Database from lbry.wallet.database import Database
from .wallet import Wallet, WalletStorage, ENCRYPT_ON_DISK from lbry.wallet.wallet import Wallet, WalletStorage, ENCRYPT_ON_DISK
from .rpc.jsonrpc import CodeMessageError from lbry.wallet.rpc.jsonrpc import CodeMessageError
if typing.TYPE_CHECKING: if typing.TYPE_CHECKING:
from lbry.extras.daemon.exchange_rate_manager import ExchangeRateManager from lbry.extras.daemon.exchange_rate_manager import ExchangeRateManager


@@ -8,15 +8,15 @@ from collections import defaultdict
from typing import Dict, Optional, Tuple from typing import Dict, Optional, Tuple
import aiohttp import aiohttp
import grpc import grpc
from lbry.schema.types.v2 import hub_pb2_grpc from scribe.schema.types.v2 import hub_pb2_grpc
from lbry.schema.types.v2.hub_pb2 import SearchRequest from scribe.schema.types.v2.hub_pb2 import SearchRequest
from lbry import __version__ from lbry import __version__
from lbry.utils import resolve_host from lbry.utils import resolve_host
from lbry.error import IncompatibleWalletServerError from lbry.error import IncompatibleWalletServerError
from lbry.wallet.rpc import RPCSession as BaseClientSession, Connector, RPCError, ProtocolError from lbry.wallet.rpc import RPCSession as BaseClientSession, Connector, RPCError, ProtocolError
from lbry.wallet.stream import StreamController from lbry.wallet.stream import StreamController
from lbry.wallet.server.udp import SPVStatusClientProtocol, SPVPong from lbry.wallet.udp import SPVStatusClientProtocol, SPVPong
from lbry.conf import KnownHubsList from lbry.conf import KnownHubsList
log = logging.getLogger(__name__) log = logging.getLogger(__name__)
@@ -122,7 +122,7 @@ class ClientSession(BaseClientSession):
await asyncio.sleep(max(0, max_idle - (now - self.last_send))) await asyncio.sleep(max(0, max_idle - (now - self.last_send)))
except Exception as err: except Exception as err:
if isinstance(err, asyncio.CancelledError): if isinstance(err, asyncio.CancelledError):
log.warning("closing connection to %s:%i", *self.server) log.info("closing connection to %s:%i", *self.server)
else: else:
log.exception("lost connection to spv") log.exception("lost connection to spv")
finally: finally:
@@ -140,7 +140,7 @@ class ClientSession(BaseClientSession):
controller.add(request.args) controller.add(request.args)
def connection_lost(self, exc): def connection_lost(self, exc):
log.warning("Connection lost: %s:%d", *self.server) log.debug("Connection lost: %s:%d", *self.server)
super().connection_lost(exc) super().connection_lost(exc)
self.response_time = None self.response_time = None
self.connection_latency = None self.connection_latency = None
@@ -303,7 +303,7 @@ class Network:
concurrency=self.config.get('concurrent_hub_requests', 30)) concurrency=self.config.get('concurrent_hub_requests', 30))
try: try:
await client.create_connection() await client.create_connection()
log.warning("Connected to spv server %s:%i", host, port) log.info("Connected to spv server %s:%i", host, port)
await client.ensure_server_version() await client.ensure_server_version()
return client return client
except (asyncio.TimeoutError, ConnectionError, OSError, IncompatibleWalletServerError, RPCError): except (asyncio.TimeoutError, ConnectionError, OSError, IncompatibleWalletServerError, RPCError):
@@ -357,7 +357,7 @@ class Network:
self._keepalive_task = None self._keepalive_task = None
self.client = None self.client = None
self.server_features = None self.server_features = None
log.warning("connection lost to %s", server_str) log.info("connection lost to %s", server_str)
log.info("network loop finished") log.info("network loop finished")
async def stop(self): async def stop(self):


@@ -1,5 +1,5 @@
__hub_url__ = ( __hub_url__ = (
"https://github.com/lbryio/hub/releases/download/v0.2022.01.21.1/hub" "https://github.com/lbryio/hub/releases/download/v0.2022.01.21.1/hub"
) )
from .node import Conductor from lbry.wallet.orchstr8.node import Conductor
from .service import ConductorService from lbry.wallet.orchstr8.service import ConductorService


@@ -5,7 +5,9 @@ import aiohttp
from lbry import wallet from lbry import wallet
from lbry.wallet.orchstr8.node import ( from lbry.wallet.orchstr8.node import (
Conductor, get_blockchain_node_from_ledger Conductor,
get_lbcd_node_from_ledger,
get_lbcwallet_node_from_ledger
) )
from lbry.wallet.orchstr8.service import ConductorService from lbry.wallet.orchstr8.service import ConductorService
@@ -16,10 +18,11 @@ def get_argument_parser():
) )
subparsers = parser.add_subparsers(dest='command', help='sub-command help') subparsers = parser.add_subparsers(dest='command', help='sub-command help')
subparsers.add_parser("download", help="Download blockchain node binary.") subparsers.add_parser("download", help="Download lbcd and lbcwallet node binaries.")
start = subparsers.add_parser("start", help="Start orchstr8 service.") start = subparsers.add_parser("start", help="Start orchstr8 service.")
start.add_argument("--blockchain", help="Hostname to start blockchain node.") start.add_argument("--lbcd", help="Hostname to start lbcd node.")
start.add_argument("--lbcwallet", help="Hostname to start lbcwallet node.")
start.add_argument("--spv", help="Hostname to start SPV server.") start.add_argument("--spv", help="Hostname to start SPV server.")
start.add_argument("--wallet", help="Hostname to start wallet daemon.") start.add_argument("--wallet", help="Hostname to start wallet daemon.")
@@ -47,7 +50,8 @@ def main():
if command == 'download': if command == 'download':
logging.getLogger('blockchain').setLevel(logging.INFO) logging.getLogger('blockchain').setLevel(logging.INFO)
get_blockchain_node_from_ledger(wallet).ensure() get_lbcd_node_from_ledger(wallet).ensure()
get_lbcwallet_node_from_ledger(wallet).ensure()
elif command == 'generate': elif command == 'generate':
loop.run_until_complete(run_remote_command( loop.run_until_complete(run_remote_command(
@@ -57,9 +61,12 @@ def main():
elif command == 'start': elif command == 'start':
conductor = Conductor() conductor = Conductor()
if getattr(args, 'blockchain', False): if getattr(args, 'lbcd', False):
conductor.blockchain_node.hostname = args.blockchain conductor.lbcd_node.hostname = args.lbcd
loop.run_until_complete(conductor.start_blockchain()) loop.run_until_complete(conductor.start_lbcd())
if getattr(args, 'lbcwallet', False):
conductor.lbcwallet_node.hostname = args.lbcwallet
loop.run_until_complete(conductor.start_lbcwallet())
if getattr(args, 'spv', False): if getattr(args, 'spv', False):
conductor.spv_node.hostname = args.spv conductor.spv_node.hostname = args.spv
loop.run_until_complete(conductor.start_spv()) loop.run_until_complete(conductor.start_spv())
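
With lbrycrd split into lbcd plus lbcwallet, driving the full regtest stack from code mirrors the CLI path above. A sketch using Conductor's own start order (lbcd, lbcwallet, spv, hub, wallet):

import asyncio
from lbry.wallet.orchstr8.node import Conductor

async def run_stack():
    conductor = Conductor()
    await conductor.start()  # starts lbcd, lbcwallet, spv, hub, then the wallet
    try:
        pass  # drive tests against conductor.wallet_node / conductor.spv_node here
    finally:
        await conductor.stop()

asyncio.run(run_stack())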


@@ -1,4 +1,5 @@
import os import os
import signal
import json import json
import shutil import shutil
import asyncio import asyncio
@@ -7,35 +8,39 @@ import tarfile
import logging import logging
import tempfile import tempfile
import subprocess import subprocess
import importlib import platform
from distutils.util import strtobool from distutils.util import strtobool
from binascii import hexlify from binascii import hexlify
from typing import Type, Optional from typing import Type, Optional
import urllib.request import urllib.request
from uuid import uuid4 from uuid import uuid4
import lbry import lbry
from lbry.wallet.server.server import Server
from lbry.wallet.server.env import Env
from lbry.wallet import Wallet, Ledger, RegTestLedger, WalletManager, Account, BlockHeightEvent from lbry.wallet import Wallet, Ledger, RegTestLedger, WalletManager, Account, BlockHeightEvent
from lbry.conf import KnownHubsList, Config from lbry.conf import KnownHubsList, Config
from lbry.wallet.orchstr8 import __hub_url__ from lbry.wallet.orchstr8 import __hub_url__
from scribe.env import Env
from scribe.hub.service import HubServerService
from scribe.elasticsearch.service import ElasticSyncService
from scribe.blockchain.service import BlockchainProcessorService
log = logging.getLogger(__name__) log = logging.getLogger(__name__)
def get_spvserver_from_ledger(ledger_module): def get_lbcd_node_from_ledger(ledger_module):
spvserver_path, regtest_class_name = ledger_module.__spvserver__.rsplit('.', 1) return LBCDNode(
spvserver_module = importlib.import_module(spvserver_path) ledger_module.__lbcd_url__,
return getattr(spvserver_module, regtest_class_name) ledger_module.__lbcd__,
ledger_module.__lbcctl__
)
def get_blockchain_node_from_ledger(ledger_module): def get_lbcwallet_node_from_ledger(ledger_module):
return BlockchainNode( return LBCWalletNode(
ledger_module.__node_url__, ledger_module.__lbcwallet_url__,
os.path.join(ledger_module.__node_bin__, ledger_module.__node_daemon__), ledger_module.__lbcwallet__,
os.path.join(ledger_module.__node_bin__, ledger_module.__node_cli__) ledger_module.__lbcctl__
) )
@@ -43,53 +48,51 @@ class Conductor:
def __init__(self, seed=None): def __init__(self, seed=None):
self.manager_module = WalletManager self.manager_module = WalletManager
self.spv_module = get_spvserver_from_ledger(lbry.wallet) self.lbcd_node = get_lbcd_node_from_ledger(lbry.wallet)
self.lbcwallet_node = get_lbcwallet_node_from_ledger(lbry.wallet)
self.blockchain_node = get_blockchain_node_from_ledger(lbry.wallet) self.spv_node = SPVNode()
self.spv_node = SPVNode(self.spv_module)
self.wallet_node = WalletNode( self.wallet_node = WalletNode(
self.manager_module, RegTestLedger, default_seed=seed self.manager_module, RegTestLedger, default_seed=seed
) )
self.hub_node = HubNode(__hub_url__, "hub", self.spv_node) self.hub_node = HubNode(__hub_url__, "hub", self.spv_node)
self.blockchain_started = False self.lbcd_started = False
self.lbcwallet_started = False
self.spv_started = False self.spv_started = False
self.wallet_started = False self.wallet_started = False
self.hub_started = False self.hub_started = False
self.log = log.getChild('conductor') self.log = log.getChild('conductor')
async def start_blockchain(self): async def start_lbcd(self):
if not self.blockchain_started: if not self.lbcd_started:
asyncio.create_task(self.blockchain_node.start()) await self.lbcd_node.start()
await self.blockchain_node.running.wait() self.lbcd_started = True
await self.blockchain_node.generate(200)
self.blockchain_started = True
async def stop_blockchain(self): async def stop_lbcd(self, cleanup=True):
if self.blockchain_started: if self.lbcd_started:
await self.blockchain_node.stop(cleanup=True) await self.lbcd_node.stop(cleanup)
self.blockchain_started = False self.lbcd_started = False
async def start_hub(self): async def start_hub(self):
if not self.hub_started: if not self.hub_started:
asyncio.create_task(self.hub_node.start()) await self.hub_node.start()
await self.blockchain_node.running.wait() await self.lbcwallet_node.running.wait()
self.hub_started = True self.hub_started = True
async def stop_hub(self): async def stop_hub(self, cleanup=True):
if self.hub_started: if self.hub_started:
await self.hub_node.stop(cleanup=True) await self.hub_node.stop(cleanup)
self.hub_started = False self.hub_started = False
async def start_spv(self): async def start_spv(self):
if not self.spv_started: if not self.spv_started:
await self.spv_node.start(self.blockchain_node) await self.spv_node.start(self.lbcwallet_node)
self.spv_started = True self.spv_started = True
async def stop_spv(self): async def stop_spv(self, cleanup=True):
if self.spv_started: if self.spv_started:
await self.spv_node.stop(cleanup=True) await self.spv_node.stop(cleanup)
self.spv_started = False self.spv_started = False
async def start_wallet(self): async def start_wallet(self):
@@ -97,21 +100,41 @@ class Conductor:
await self.wallet_node.start(self.spv_node) await self.wallet_node.start(self.spv_node)
self.wallet_started = True self.wallet_started = True
async def stop_wallet(self): async def stop_wallet(self, cleanup=True):
if self.wallet_started: if self.wallet_started:
await self.wallet_node.stop(cleanup=True) await self.wallet_node.stop(cleanup)
self.wallet_started = False self.wallet_started = False
async def start_lbcwallet(self, clean=True):
if not self.lbcwallet_started:
await self.lbcwallet_node.start()
if clean:
mining_addr = await self.lbcwallet_node.get_new_address()
self.lbcwallet_node.mining_addr = mining_addr
await self.lbcwallet_node.generate(200)
# unlock the wallet for the next 1 hour
await self.lbcwallet_node.wallet_passphrase("password", 3600)
self.lbcwallet_started = True
async def stop_lbcwallet(self, cleanup=True):
if self.lbcwallet_started:
await self.lbcwallet_node.stop(cleanup)
self.lbcwallet_started = False
async def start(self): async def start(self):
await self.start_blockchain() await self.start_lbcd()
await self.start_lbcwallet()
await self.start_spv() await self.start_spv()
await self.start_hub()
await self.start_wallet() await self.start_wallet()
async def stop(self): async def stop(self):
all_the_stops = [ all_the_stops = [
self.stop_wallet, self.stop_wallet,
self.stop_hub,
self.stop_spv, self.stop_spv,
self.stop_blockchain self.stop_lbcwallet,
self.stop_lbcd
] ]
for stop in all_the_stops: for stop in all_the_stops:
try: try:
@@ -119,6 +142,12 @@ class Conductor:
except Exception as e: except Exception as e:
log.exception('Exception raised while stopping services:', exc_info=e) log.exception('Exception raised while stopping services:', exc_info=e)
async def clear_mempool(self):
await self.stop_lbcwallet(cleanup=False)
await self.stop_lbcd(cleanup=False)
await self.start_lbcd()
await self.start_lbcwallet(clean=False)
class WalletNode: class WalletNode:
@@ -139,10 +168,11 @@ class WalletNode:
async def start(self, spv_node: 'SPVNode', seed=None, connect=True, config=None): async def start(self, spv_node: 'SPVNode', seed=None, connect=True, config=None):
wallets_dir = os.path.join(self.data_path, 'wallets') wallets_dir = os.path.join(self.data_path, 'wallets')
os.mkdir(wallets_dir)
wallet_file_name = os.path.join(wallets_dir, 'my_wallet.json') wallet_file_name = os.path.join(wallets_dir, 'my_wallet.json')
with open(wallet_file_name, 'w') as wallet_file: if not os.path.isdir(wallets_dir):
wallet_file.write('{"version": 1, "accounts": []}\n') os.mkdir(wallets_dir)
with open(wallet_file_name, 'w') as wallet_file:
wallet_file.write('{"version": 1, "accounts": []}\n')
self.manager = self.manager_class.from_config({ self.manager = self.manager_class.from_config({
'ledgers': { 'ledgers': {
self.ledger_class.get_id(): { self.ledger_class.get_id(): {
@@ -184,55 +214,72 @@ class WalletNode:
class SPVNode: class SPVNode:
def __init__(self, node_number=1):
def __init__(self, coin_class, node_number=1): self.node_number = node_number
self.coin_class = coin_class
self.controller = None self.controller = None
self.data_path = None self.data_path = None
self.server = None self.server: Optional[HubServerService] = None
self.writer: Optional[BlockchainProcessorService] = None
self.es_writer: Optional[ElasticSyncService] = None
self.hostname = 'localhost' self.hostname = 'localhost'
self.port = 50001 + node_number # avoid conflict with default daemon self.port = 50001 + node_number # avoid conflict with default daemon
self.udp_port = self.port self.udp_port = self.port
self.elastic_notifier_port = 19080 + node_number
self.session_timeout = 600 self.session_timeout = 600
self.rpc_port = '0' # disabled by default self.stopped = True
self.stopped = False
self.index_name = uuid4().hex self.index_name = uuid4().hex
async def start(self, blockchain_node: 'BlockchainNode', extraconf=None): async def start(self, lbcwallet_node: 'LBCWalletNode', extraconf=None):
self.data_path = tempfile.mkdtemp() if not self.stopped:
conf = { log.warning("spv node is already running")
'DESCRIPTION': '', return
'PAYMENT_ADDRESS': '', self.stopped = False
'DAILY_FEE': '0', try:
'DB_DIRECTORY': self.data_path, self.data_path = tempfile.mkdtemp()
'DAEMON_URL': blockchain_node.rpc_url, conf = {
'REORG_LIMIT': '100', 'description': '',
'HOST': self.hostname, 'payment_address': '',
'TCP_PORT': str(self.port), 'daily_fee': '0',
'UDP_PORT': str(self.udp_port), 'db_dir': self.data_path,
'SESSION_TIMEOUT': str(self.session_timeout), 'daemon_url': lbcwallet_node.rpc_url,
'MAX_QUERY_WORKERS': '0', 'reorg_limit': 100,
'INDIVIDUAL_TAG_INDEXES': '', 'host': self.hostname,
'RPC_PORT': self.rpc_port, 'tcp_port': self.port,
'ES_INDEX_PREFIX': self.index_name, 'udp_port': self.udp_port,
'ES_MODE': 'writer', 'elastic_notifier_port': self.elastic_notifier_port,
} 'session_timeout': self.session_timeout,
if extraconf: 'max_query_workers': 0,
conf.update(extraconf) 'es_index_prefix': self.index_name,
# TODO: don't use os.environ 'chain': 'regtest'
os.environ.update(conf) }
self.server = Server(Env(self.coin_class)) if extraconf:
self.server.bp.mempool.refresh_secs = self.server.bp.prefetcher.polling_delay = 0.5 conf.update(extraconf)
await self.server.start() env = Env(**conf)
self.writer = BlockchainProcessorService(env)
self.server = HubServerService(env)
self.es_writer = ElasticSyncService(env)
await self.writer.start()
await self.es_writer.start()
await self.server.start()
except Exception as e:
self.stopped = True
if not isinstance(e, asyncio.CancelledError):
log.exception("failed to start spv node")
raise e
async def stop(self, cleanup=True): async def stop(self, cleanup=True):
if self.stopped: if self.stopped:
log.warning("spv node is already stopped")
return return
try: try:
await self.server.db.search_index.delete_index()
await self.server.db.search_index.stop()
await self.server.stop() await self.server.stop()
await self.es_writer.delete_index()
await self.es_writer.stop()
await self.writer.stop()
self.stopped = True self.stopped = True
except Exception as e:
log.exception("failed to stop spv node")
raise e
finally: finally:
cleanup and self.cleanup() cleanup and self.cleanup()
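
SPVNode now runs scribe's three services in one process, the rocksdb writer (BlockchainProcessorService), the ES sync (ElasticSyncService) and the hub server, all built from a single Env with the lower-cased config keys shown above. A hedged sketch of exercising it directly, where `lbcwallet_node` is a running LBCWalletNode:

from lbry.wallet.orchstr8.node import SPVNode

async def spv_roundtrip(lbcwallet_node):
    node = SPVNode(node_number=2)
    await node.start(lbcwallet_node, extraconf={'reorg_limit': 200})
    try:
        # db_height is the same attribute the testcase helpers above poll
        assert node.server.db.db_height >= 0
    finally:
        await node.stop(cleanup=True)
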
@@ -240,18 +287,19 @@ class SPVNode:
shutil.rmtree(self.data_path, ignore_errors=True) shutil.rmtree(self.data_path, ignore_errors=True)
class BlockchainProcess(asyncio.SubprocessProtocol): class LBCDProcess(asyncio.SubprocessProtocol):
IGNORE_OUTPUT = [ IGNORE_OUTPUT = [
b'keypool keep', b'keypool keep',
b'keypool reserve', b'keypool reserve',
b'keypool return', b'keypool return',
b'Block submitted',
] ]
def __init__(self): def __init__(self):
self.ready = asyncio.Event() self.ready = asyncio.Event()
self.stopped = asyncio.Event() self.stopped = asyncio.Event()
self.log = log.getChild('blockchain') self.log = log.getChild('lbcd')
def pipe_data_received(self, fd, data): def pipe_data_received(self, fd, data):
if self.log and not any(ignore in data for ignore in self.IGNORE_OUTPUT): if self.log and not any(ignore in data for ignore in self.IGNORE_OUTPUT):
@@ -262,7 +310,7 @@ class BlockchainProcess(asyncio.SubprocessProtocol):
if b'Error:' in data: if b'Error:' in data:
self.ready.set() self.ready.set()
raise SystemError(data.decode()) raise SystemError(data.decode())
if b'Done loading' in data: if b'RPCS: RPC server listening on' in data:
self.ready.set() self.ready.set()
def process_exited(self): def process_exited(self):
@@ -270,39 +318,57 @@ class BlockchainProcess(asyncio.SubprocessProtocol):
self.ready.set() self.ready.set()
class BlockchainNode: class WalletProcess(asyncio.SubprocessProtocol):
P2SH_SEGWIT_ADDRESS = "p2sh-segwit" IGNORE_OUTPUT = [
BECH32_ADDRESS = "bech32" ]
def __init__(self):
self.ready = asyncio.Event()
self.stopped = asyncio.Event()
self.log = log.getChild('lbcwallet')
self.transport: Optional[asyncio.transports.SubprocessTransport] = None
def pipe_data_received(self, fd, data):
if self.log and not any(ignore in data for ignore in self.IGNORE_OUTPUT):
if b'Error:' in data:
self.log.error(data.decode())
else:
self.log.info(data.decode())
if b'Error:' in data:
self.ready.set()
raise SystemError(data.decode())
if b'WLLT: Finished rescan' in data:
self.ready.set()
def process_exited(self):
self.stopped.set()
self.ready.set()
class LBCDNode:
def __init__(self, url, daemon, cli): def __init__(self, url, daemon, cli):
self.latest_release_url = url self.latest_release_url = url
self.project_dir = os.path.dirname(os.path.dirname(__file__)) self.project_dir = os.path.dirname(os.path.dirname(__file__))
self.bin_dir = os.path.join(self.project_dir, 'bin') self.bin_dir = os.path.join(self.project_dir, 'bin')
self.daemon_bin = os.path.join(self.bin_dir, daemon) self.daemon_bin = os.path.join(self.bin_dir, daemon)
self.cli_bin = os.path.join(self.bin_dir, cli) self.cli_bin = os.path.join(self.bin_dir, cli)
self.log = log.getChild('blockchain') self.log = log.getChild('lbcd')
self.data_path = None self.data_path = tempfile.mkdtemp()
self.protocol = None self.protocol = None
self.transport = None self.transport = None
self.block_expected = 0
self.hostname = 'localhost' self.hostname = 'localhost'
self.peerport = 9246 + 2 # avoid conflict with default peer port self.peerport = 29246
self.rpcport = 9245 + 2 # avoid conflict with default rpc port self.rpcport = 29245
self.rpcuser = 'rpcuser' self.rpcuser = 'rpcuser'
self.rpcpassword = 'rpcpassword' self.rpcpassword = 'rpcpassword'
self.stopped = False self.stopped = True
self.restart_ready = asyncio.Event()
self.restart_ready.set()
self.running = asyncio.Event() self.running = asyncio.Event()
@property @property
def rpc_url(self): def rpc_url(self):
return f'http://{self.rpcuser}:{self.rpcpassword}@{self.hostname}:{self.rpcport}/' return f'http://{self.rpcuser}:{self.rpcpassword}@{self.hostname}:{self.rpcport}/'
def is_expected_block(self, e: BlockHeightEvent):
return self.block_expected == e.height
@property @property
def exists(self): def exists(self):
return ( return (
@@ -311,6 +377,12 @@ class BlockchainNode:
) )
def download(self): def download(self):
uname = platform.uname()
target_os = str.lower(uname.system)
target_arch = str.replace(uname.machine, 'x86_64', 'amd64')
target_platform = target_os + '_' + target_arch
self.latest_release_url = str.replace(self.latest_release_url, 'TARGET_PLATFORM', target_platform)
downloaded_file = os.path.join( downloaded_file = os.path.join(
self.bin_dir, self.bin_dir,
self.latest_release_url[self.latest_release_url.rfind('/')+1:] self.latest_release_url[self.latest_release_url.rfind('/')+1:]
@@ -344,72 +416,206 @@ class BlockchainNode:
return self.exists or self.download() return self.exists or self.download()
async def start(self): async def start(self):
assert self.ensure() if not self.stopped:
self.data_path = tempfile.mkdtemp() return
loop = asyncio.get_event_loop() self.stopped = False
asyncio.get_child_watcher().attach_loop(loop) try:
command = [ assert self.ensure()
self.daemon_bin, loop = asyncio.get_event_loop()
f'-datadir={self.data_path}', '-printtoconsole', '-regtest', '-server', '-txindex', asyncio.get_child_watcher().attach_loop(loop)
f'-rpcuser={self.rpcuser}', f'-rpcpassword={self.rpcpassword}', f'-rpcport={self.rpcport}', command = [
f'-port={self.peerport}' self.daemon_bin,
] '--notls',
self.log.info(' '.join(command)) f'--datadir={self.data_path}',
while not self.stopped: '--regtest', f'--listen=127.0.0.1:{self.peerport}', f'--rpclisten=127.0.0.1:{self.rpcport}',
if self.running.is_set(): '--txindex', f'--rpcuser={self.rpcuser}', f'--rpcpass={self.rpcpassword}'
await asyncio.sleep(1) ]
continue self.log.info(' '.join(command))
await self.restart_ready.wait() self.transport, self.protocol = await loop.subprocess_exec(
try: LBCDProcess, *command
self.transport, self.protocol = await loop.subprocess_exec( )
BlockchainProcess, *command await self.protocol.ready.wait()
) assert not self.protocol.stopped.is_set()
await self.protocol.ready.wait() self.running.set()
assert not self.protocol.stopped.is_set() except asyncio.CancelledError:
self.running.set() self.running.clear()
except asyncio.CancelledError: self.stopped = True
self.running.clear() raise
raise except Exception as e:
except Exception as e: self.running.clear()
self.running.clear() self.stopped = True
log.exception('failed to start lbrycrdd', exc_info=e) log.exception('failed to start lbcd', exc_info=e)
raise
async def stop(self, cleanup=True): async def stop(self, cleanup=True):
if self.stopped:
return
try:
if self.transport:
self.transport.terminate()
await self.protocol.stopped.wait()
self.transport.close()
except Exception as e:
log.exception('failed to stop lbcd', exc_info=e)
raise
finally:
self.log.info("Done shutting down " + self.daemon_bin)
self.stopped = True
if cleanup:
self.cleanup()
self.running.clear()
def cleanup(self):
assert self.stopped
shutil.rmtree(self.data_path, ignore_errors=True)
class LBCWalletNode:
P2SH_SEGWIT_ADDRESS = "p2sh-segwit"
BECH32_ADDRESS = "bech32"
def __init__(self, url, lbcwallet, cli):
self.latest_release_url = url
self.project_dir = os.path.dirname(os.path.dirname(__file__))
self.bin_dir = os.path.join(self.project_dir, 'bin')
self.lbcwallet_bin = os.path.join(self.bin_dir, lbcwallet)
self.cli_bin = os.path.join(self.bin_dir, cli)
self.log = log.getChild('lbcwallet')
self.protocol = None
self.transport = None
self.hostname = 'localhost'
self.lbcd_rpcport = 29245
self.lbcwallet_rpcport = 29244
self.rpcuser = 'rpcuser'
self.rpcpassword = 'rpcpassword'
self.data_path = tempfile.mkdtemp()
self.stopped = True self.stopped = True
self.running = asyncio.Event()
self.block_expected = 0
self.mining_addr = ''
@property
def rpc_url(self):
# FIXME: somehow the hub/sdk doesn't learn the blocks through the wallet RPC port, why?
# return f'http://{self.rpcuser}:{self.rpcpassword}@{self.hostname}:{self.lbcwallet_rpcport}/'
return f'http://{self.rpcuser}:{self.rpcpassword}@{self.hostname}:{self.lbcd_rpcport}/'
def is_expected_block(self, e: BlockHeightEvent):
return self.block_expected == e.height
@property
def exists(self):
return (
os.path.exists(self.lbcwallet_bin)
)
def download(self):
uname = platform.uname()
target_os = str.lower(uname.system)
target_arch = str.replace(uname.machine, 'x86_64', 'amd64')
target_platform = target_os + '_' + target_arch
self.latest_release_url = str.replace(self.latest_release_url, 'TARGET_PLATFORM', target_platform)
downloaded_file = os.path.join(
self.bin_dir,
self.latest_release_url[self.latest_release_url.rfind('/')+1:]
)
if not os.path.exists(self.bin_dir):
os.mkdir(self.bin_dir)
if not os.path.exists(downloaded_file):
self.log.info('Downloading: %s', self.latest_release_url)
with urllib.request.urlopen(self.latest_release_url) as response:
with open(downloaded_file, 'wb') as out_file:
shutil.copyfileobj(response, out_file)
self.log.info('Extracting: %s', downloaded_file)
if downloaded_file.endswith('.zip'):
with zipfile.ZipFile(downloaded_file) as dotzip:
dotzip.extractall(self.bin_dir)
# zipfile bug https://bugs.python.org/issue15795
os.chmod(self.lbcwallet_bin, 0o755)
elif downloaded_file.endswith('.tar.gz'):
with tarfile.open(downloaded_file) as tar:
tar.extractall(self.bin_dir)
return self.exists
def ensure(self):
return self.exists or self.download()
async def start(self):
assert self.ensure()
loop = asyncio.get_event_loop()
asyncio.get_child_watcher().attach_loop(loop)
command = [
self.lbcwallet_bin,
'--noservertls', '--noclienttls',
'--regtest',
f'--rpcconnect=127.0.0.1:{self.lbcd_rpcport}', f'--rpclisten=127.0.0.1:{self.lbcwallet_rpcport}',
'--createtemp', f'--appdata={self.data_path}',
f'--username={self.rpcuser}', f'--password={self.rpcpassword}'
]
self.log.info(' '.join(command))
try:
self.transport, self.protocol = await loop.subprocess_exec(
WalletProcess, *command
)
self.protocol.transport = self.transport
await self.protocol.ready.wait()
assert not self.protocol.stopped.is_set()
self.running.set()
self.stopped = False
except asyncio.CancelledError:
self.running.clear()
raise
except Exception as e:
self.running.clear()
log.exception('failed to start lbcwallet', exc_info=e)
def cleanup(self):
assert self.stopped
shutil.rmtree(self.data_path, ignore_errors=True)
async def stop(self, cleanup=True):
if self.stopped:
return
try: try:
self.transport.terminate() self.transport.terminate()
await self.protocol.stopped.wait() await self.protocol.stopped.wait()
self.transport.close() self.transport.close()
except Exception as e:
log.exception('failed to stop lbcwallet', exc_info=e)
raise
finally: finally:
self.log.info("Done shutting down " + self.lbcwallet_bin)
self.stopped = True
if cleanup: if cleanup:
self.cleanup() self.cleanup()
self.running.clear()
async def clear_mempool(self):
self.restart_ready.clear()
self.transport.terminate()
await self.protocol.stopped.wait()
self.transport.close()
self.running.clear()
os.remove(os.path.join(self.data_path, 'regtest', 'mempool.dat'))
self.restart_ready.set()
await self.running.wait()
def cleanup(self):
shutil.rmtree(self.data_path, ignore_errors=True)
async def _cli_cmnd(self, *args): async def _cli_cmnd(self, *args):
cmnd_args = [ cmnd_args = [
self.cli_bin, f'-datadir={self.data_path}', '-regtest', self.cli_bin,
f'-rpcuser={self.rpcuser}', f'-rpcpassword={self.rpcpassword}', f'-rpcport={self.rpcport}' f'--rpcuser={self.rpcuser}', f'--rpcpass={self.rpcpassword}', '--notls', '--regtest', '--wallet'
] + list(args) ] + list(args)
self.log.info(' '.join(cmnd_args)) self.log.info(' '.join(cmnd_args))
loop = asyncio.get_event_loop() loop = asyncio.get_event_loop()
asyncio.get_child_watcher().attach_loop(loop) asyncio.get_child_watcher().attach_loop(loop)
process = await asyncio.create_subprocess_exec( process = await asyncio.create_subprocess_exec(
*cmnd_args, stdout=subprocess.PIPE, stderr=subprocess.STDOUT *cmnd_args, stdout=subprocess.PIPE, stderr=subprocess.PIPE
) )
out, _ = await process.communicate() out, err = await process.communicate()
result = out.decode().strip() result = out.decode().strip()
err = err.decode().strip()
if len(result) <= 0 and err.startswith('-'):
raise Exception(err)
if err and 'creating a default config file' not in err:
log.warning(err)
self.log.info(result) self.log.info(result)
if result.startswith('error code'): if result.startswith('error code'):
raise Exception(result) raise Exception(result)
@@ -417,7 +623,14 @@ class BlockchainNode:
def generate(self, blocks): def generate(self, blocks):
self.block_expected += blocks self.block_expected += blocks
return self._cli_cmnd('generate', str(blocks)) return self._cli_cmnd('generatetoaddress', str(blocks), self.mining_addr)
def generate_to_address(self, blocks, addr):
self.block_expected += blocks
return self._cli_cmnd('generatetoaddress', str(blocks), addr)
def wallet_passphrase(self, passphrase, timeout):
return self._cli_cmnd('walletpassphrase', passphrase, str(timeout))
def invalidate_block(self, blockhash): def invalidate_block(self, blockhash):
return self._cli_cmnd('invalidateblock', blockhash) return self._cli_cmnd('invalidateblock', blockhash)
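
generate() now mines with generatetoaddress to the conductor-assigned mining_addr (plain generate is gone with lbcd), and wallet_passphrase unlocks lbcwallet before it can spend. An assumed regtest funding flow through these CLI wrappers:

from lbry.wallet.orchstr8.node import LBCWalletNode

async def fund_wallet(node: LBCWalletNode):
    addr = await node.get_new_address()             # address_type now defaults to 'legacy'
    await node.wallet_passphrase('password', 3600)  # unlock for one hour before spending
    await node.generate_to_address(101, addr)       # mature one coinbase (100 confirmations)
    txid = await node.send_to_address(addr, 1.0)
    await node.generate(1)                          # confirm; mines to node.mining_addr
    return txid
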
@@ -434,7 +647,7 @@ class BlockchainNode:
def get_raw_change_address(self): def get_raw_change_address(self):
return self._cli_cmnd('getrawchangeaddress') return self._cli_cmnd('getrawchangeaddress')
def get_new_address(self, address_type): def get_new_address(self, address_type='legacy'):
return self._cli_cmnd('getnewaddress', "", address_type) return self._cli_cmnd('getnewaddress', "", address_type)
async def get_balance(self): async def get_balance(self):
@ -450,7 +663,10 @@ class BlockchainNode:
return self._cli_cmnd('createrawtransaction', json.dumps(inputs), json.dumps(outputs)) return self._cli_cmnd('createrawtransaction', json.dumps(inputs), json.dumps(outputs))
async def sign_raw_transaction_with_wallet(self, tx): async def sign_raw_transaction_with_wallet(self, tx):
return json.loads(await self._cli_cmnd('signrawtransactionwithwallet', tx))['hex'].encode() # the "withwallet" portion should only come into play if we are doing segwit.
# and "withwallet" doesn't exist on lbcd yet.
result = await self._cli_cmnd('signrawtransaction', tx)
return json.loads(result)['hex'].encode()
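signrawtransaction returns a JSON object whose hex field carries the signed transaction, which the helper above extracts and re-encodes as bytes. A sketch of that parsing step against a placeholder response (a real lbcd reply also carries a "complete" flag):

    import json

    raw_response = '{"hex": "0100000001abcd", "complete": true}'  # placeholder
    signed_tx = json.loads(raw_response)['hex'].encode()
    assert signed_tx == b'0100000001abcd'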
def decode_raw_transaction(self, tx):
return self._cli_cmnd('decoderawtransaction', hexlify(tx.raw).decode())
@@ -460,12 +676,15 @@
class HubProcess(asyncio.SubprocessProtocol):
def __init__(self, ready, stopped):
self.ready = ready
self.stopped = stopped
self.log = log.getChild('hub')
self.transport = None
def pipe_data_received(self, fd, data):
self.stopped.clear()
self.ready.set()
if self.log:
self.log.info(data.decode())
if b'error' in data.lower():
@@ -479,16 +698,26 @@ class HubProcess(asyncio.SubprocessProtocol):
print(line)
def process_exited(self):
self.ready.clear()
self.stopped.set()
async def stop(self):
t = asyncio.create_task(self.stopped.wait())
try:
self.transport.send_signal(signal.SIGINT)
await asyncio.wait_for(t, 3)
# log.warning("stopped go hub")
except asyncio.TimeoutError:
if not t.done():
t.cancel()
self.transport.terminate()
await self.stopped.wait()
log.warning("terminated go hub")
class HubNode:
def __init__(self, url, daemon, spv_node):
self.spv_node = spv_node
self.latest_release_url = url
self.project_dir = os.path.dirname(os.path.dirname(__file__))
self.bin_dir = os.path.join(self.project_dir, 'bin')
@@ -499,11 +728,13 @@ class HubNode:
self.protocol = None
self.hostname = 'localhost'
self.rpcport = 50051 # avoid conflict with default rpc port
self._stopped = asyncio.Event()
self.running = asyncio.Event()
@property
def stopped(self):
return not self.running.is_set()
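Replacing the old mutable stopped flag with a property derived from the running event leaves a single source of truth, so start/stop code can no longer let the two drift apart. The shape in isolation:

    import asyncio

    class Service:
        def __init__(self):
            self.running = asyncio.Event()

        @property
        def stopped(self) -> bool:
            return not self.running.is_set()  # stopped is just "not running"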
@property
def exists(self):
return (
@@ -554,33 +785,24 @@ class HubNode:
self.daemon_bin, 'serve', '--esindex', self.spv_node.index_name + 'claims', '--debug'
]
self.log.info(' '.join(command))
self.protocol = HubProcess(self.running, self._stopped)
try:
self.transport, _ = await loop.subprocess_exec(
lambda: self.protocol, *command
)
self.protocol.transport = self.transport
except Exception as e:
log.exception('failed to start go hub', exc_info=e)
raise e
await self.protocol.ready.wait()
async def stop(self, cleanup=True):
try:
if self.protocol:
await self.protocol.stop()
except Exception as e:
log.exception('failed to stop go hub', exc_info=e)
raise e
finally:
if cleanup:
self.cleanup()


@@ -61,8 +61,10 @@ class ConductorService:
#set_logging(
# self.stack.ledger_module, logging.DEBUG, WebSocketLogHandler(self.send_message)
#)
self.stack.lbcd_started or await self.stack.start_lbcd()
self.send_message({'type': 'service', 'name': 'lbcd', 'port': self.stack.lbcd_node.port})
self.stack.lbcwallet_started or await self.stack.start_lbcwallet()
self.send_message({'type': 'service', 'name': 'lbcwallet', 'port': self.stack.lbcwallet_node.port})
self.stack.spv_started or await self.stack.start_spv()
self.send_message({'type': 'service', 'name': 'spv', 'port': self.stack.spv_node.port})
self.stack.wallet_started or await self.stack.start_wallet()
@@ -74,7 +76,7 @@ class ConductorService:
async def generate(self, request):
data = await request.post()
blocks = data.get('blocks', 1)
await self.stack.lbcwallet_node.generate(int(blocks))
return json_response({'blocks': blocks})
async def transfer(self, request):
@@ -85,11 +87,14 @@ class ConductorService:
if not address:
raise ValueError("No address was provided.")
amount = data.get('amount', 1)
if self.stack.wallet_started:
watcher = self.stack.wallet_node.ledger.on_transaction.where(
lambda e: e.address == address # and e.tx.id == txid -- might stall; see send_to_address_and_wait
)
txid = await self.stack.lbcwallet_node.send_to_address(address, amount)
await watcher
else:
txid = await self.stack.lbcwallet_node.send_to_address(address, amount)
return json_response({
'address': address,
'amount': amount,
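The reordered transfer handler builds the on_transaction watcher before broadcasting, so a notification that arrives immediately after send_to_address cannot be missed; only afterwards is the event awaited, closing the race the inline comment warns about. The subscribe-then-act shape as a runnable toy:

    import asyncio

    async def demo():
        seen = asyncio.Event()

        async def send_to_address():
            seen.set()                              # notification may fire at once
            return 'txid'

        watcher = asyncio.create_task(seen.wait())  # subscribe first
        txid = await send_to_address()              # then act
        await watcher
        return txid

    assert asyncio.run(demo()) == 'txid'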
@@ -98,7 +103,7 @@ class ConductorService:
async def balance(self, _):
return json_response({
'balance': await self.stack.lbcwallet_node.get_balance()
})
async def log(self, request):
@@ -129,7 +134,7 @@ class ConductorService:
'type': 'status',
'height': self.stack.wallet_node.ledger.headers.height,
'balance': satoshis_to_coins(await self.stack.wallet_node.account.get_balance()),
'miner': await self.stack.lbcwallet_node.get_balance()
})
def send_message(self, msg):

File diff suppressed because it is too large


@@ -1,34 +0,0 @@
import logging
import traceback
import argparse
from lbry.wallet.server.env import Env
from lbry.wallet.server.server import Server
def get_argument_parser():
parser = argparse.ArgumentParser(
prog="lbry-hub"
)
Env.contribute_to_arg_parser(parser)
return parser
def main():
parser = get_argument_parser()
args = parser.parse_args()
logging.basicConfig(level=logging.INFO, format="%(asctime)s %(levelname)-4s %(name)s:%(lineno)d: %(message)s")
logging.info('lbry.server starting')
logging.getLogger('aiohttp').setLevel(logging.WARNING)
logging.getLogger('elasticsearch').setLevel(logging.WARNING)
try:
server = Server(Env.from_arg_parser(args))
server.run()
except Exception:
traceback.print_exc()
logging.critical('lbry.server terminated abnormally')
else:
logging.info('lbry.server terminated normally')
if __name__ == "__main__":
main()


@@ -1,386 +0,0 @@
import re
import struct
from typing import List
from hashlib import sha256
from decimal import Decimal
from collections import namedtuple
import lbry.wallet.server.tx as lib_tx
from lbry.wallet.script import OutputScript, OP_CLAIM_NAME, OP_UPDATE_CLAIM, OP_SUPPORT_CLAIM
from lbry.wallet.server.tx import DeserializerSegWit
from lbry.wallet.server.util import cachedproperty, subclasses
from lbry.wallet.server.hash import Base58, hash160, double_sha256, hash_to_hex_str, HASHX_LEN
from lbry.wallet.server.daemon import Daemon, LBCDaemon
from lbry.wallet.server.script import ScriptPubKey, OpCodes
from lbry.wallet.server.leveldb import LevelDB
from lbry.wallet.server.session import LBRYElectrumX, LBRYSessionManager
from lbry.wallet.server.block_processor import BlockProcessor
Block = namedtuple("Block", "raw header transactions")
OP_RETURN = OpCodes.OP_RETURN
class CoinError(Exception):
"""Exception raised for coin-related errors."""
class Coin:
"""Base class of coin hierarchy."""
REORG_LIMIT = 200
# Not sure if these are coin-specific
RPC_URL_REGEX = re.compile('.+@(\\[[0-9a-fA-F:]+\\]|[^:]+)(:[0-9]+)?')
VALUE_PER_COIN = 100000000
CHUNK_SIZE = 2016
BASIC_HEADER_SIZE = 80
STATIC_BLOCK_HEADERS = True
SESSIONCLS = LBRYElectrumX
DESERIALIZER = lib_tx.Deserializer
DAEMON = Daemon
BLOCK_PROCESSOR = BlockProcessor
SESSION_MANAGER = LBRYSessionManager
DB = LevelDB
HEADER_VALUES = [
'version', 'prev_block_hash', 'merkle_root', 'timestamp', 'bits', 'nonce'
]
HEADER_UNPACK = struct.Struct('< I 32s 32s I I I').unpack_from
MEMPOOL_HISTOGRAM_REFRESH_SECS = 500
XPUB_VERBYTES = bytes('????', 'utf-8')
XPRV_VERBYTES = bytes('????', 'utf-8')
ENCODE_CHECK = Base58.encode_check
DECODE_CHECK = Base58.decode_check
# Peer discovery
PEER_DEFAULT_PORTS = {'t': '50001', 's': '50002'}
PEERS: List[str] = []
@classmethod
def lookup_coin_class(cls, name, net):
"""Return a coin class given name and network.
Raise an exception if unrecognised."""
req_attrs = ['TX_COUNT', 'TX_COUNT_HEIGHT', 'TX_PER_BLOCK']
for coin in subclasses(Coin):
if (coin.NAME.lower() == name.lower() and
coin.NET.lower() == net.lower()):
coin_req_attrs = req_attrs.copy()
missing = [attr for attr in coin_req_attrs
if not hasattr(coin, attr)]
if missing:
raise CoinError(f'coin {name} missing {missing} attributes')
return coin
raise CoinError(f'unknown coin {name} and network {net} combination')
@classmethod
def sanitize_url(cls, url):
# Remove surrounding ws and trailing /s
url = url.strip().rstrip('/')
match = cls.RPC_URL_REGEX.match(url)
if not match:
raise CoinError(f'invalid daemon URL: "{url}"')
if match.groups()[1] is None:
url += f':{cls.RPC_PORT:d}'
if not url.startswith('http://') and not url.startswith('https://'):
url = 'http://' + url
return url + '/'
@classmethod
def genesis_block(cls, block):
"""Check the Genesis block is the right one for this coin.
Return the block less its unspendable coinbase.
"""
header = cls.block_header(block, 0)
header_hex_hash = hash_to_hex_str(cls.header_hash(header))
if header_hex_hash != cls.GENESIS_HASH:
raise CoinError(f'genesis block has hash {header_hex_hash} expected {cls.GENESIS_HASH}')
return header + bytes(1)
@classmethod
def hashX_from_script(cls, script):
"""Returns a hashX from a script, or None if the script is provably
unspendable so the output can be dropped.
"""
if script and script[0] == OP_RETURN:
return None
return sha256(script).digest()[:HASHX_LEN]
@staticmethod
def lookup_xverbytes(verbytes):
"""Return a (is_xpub, coin_class) pair given xpub/xprv verbytes."""
# Order means BTC testnet will override NMC testnet
for coin in subclasses(Coin):
if verbytes == coin.XPUB_VERBYTES:
return True, coin
if verbytes == coin.XPRV_VERBYTES:
return False, coin
raise CoinError('version bytes unrecognised')
@classmethod
def address_to_hashX(cls, address):
"""Return a hashX given a coin address."""
return cls.hashX_from_script(cls.pay_to_address_script(address))
@classmethod
def P2PKH_address_from_hash160(cls, hash160):
"""Return a P2PKH address given a public key."""
assert len(hash160) == 20
return cls.ENCODE_CHECK(cls.P2PKH_VERBYTE + hash160)
@classmethod
def P2PKH_address_from_pubkey(cls, pubkey):
"""Return a coin address given a public key."""
return cls.P2PKH_address_from_hash160(hash160(pubkey))
@classmethod
def P2SH_address_from_hash160(cls, hash160):
"""Return a coin address given a hash160."""
assert len(hash160) == 20
return cls.ENCODE_CHECK(cls.P2SH_VERBYTES[0] + hash160)
@classmethod
def hash160_to_P2PKH_script(cls, hash160):
return ScriptPubKey.P2PKH_script(hash160)
@classmethod
def hash160_to_P2PKH_hashX(cls, hash160):
return cls.hashX_from_script(cls.hash160_to_P2PKH_script(hash160))
@classmethod
def pay_to_address_script(cls, address):
"""Return a pubkey script that pays to a pubkey hash.
Pass the address (either P2PKH or P2SH) in base58 form.
"""
raw = cls.DECODE_CHECK(address)
# Require version byte(s) plus hash160.
verbyte = -1
verlen = len(raw) - 20
if verlen > 0:
verbyte, hash160 = raw[:verlen], raw[verlen:]
if verbyte == cls.P2PKH_VERBYTE:
return cls.hash160_to_P2PKH_script(hash160)
if verbyte in cls.P2SH_VERBYTES:
return ScriptPubKey.P2SH_script(hash160)
raise CoinError(f'invalid address: {address}')
@classmethod
def privkey_WIF(cls, privkey_bytes, compressed):
"""Return the private key encoded in Wallet Import Format."""
payload = bytearray(cls.WIF_BYTE) + privkey_bytes
if compressed:
payload.append(0x01)
return cls.ENCODE_CHECK(payload)
@classmethod
def header_hash(cls, header):
"""Given a header return hash"""
return double_sha256(header)
@classmethod
def header_prevhash(cls, header):
"""Given a header return previous hash"""
return header[4:36]
@classmethod
def static_header_offset(cls, height):
"""Given a header height return its offset in the headers file.
If header sizes change at some point, this is the only code
that needs updating."""
assert cls.STATIC_BLOCK_HEADERS
return height * cls.BASIC_HEADER_SIZE
@classmethod
def static_header_len(cls, height):
"""Given a header height return its length."""
return (cls.static_header_offset(height + 1)
- cls.static_header_offset(height))
@classmethod
def block_header(cls, block, height):
"""Returns the block header given a block and its height."""
return block[:cls.static_header_len(height)]
@classmethod
def block(cls, raw_block, height):
"""Return a Block namedtuple given a raw block and its height."""
header = cls.block_header(raw_block, height)
txs = cls.DESERIALIZER(raw_block, start=len(header)).read_tx_block()
return Block(raw_block, header, txs)
@classmethod
def transaction(cls, raw_tx: bytes):
"""Return a Block namedtuple given a raw block and its height."""
return cls.DESERIALIZER(raw_tx).read_tx()
@classmethod
def decimal_value(cls, value):
"""Return the number of standard coin units as a Decimal given a
quantity of smallest units.
For example 1 BTC is returned for 100 million satoshis.
"""
return Decimal(value) / cls.VALUE_PER_COIN
@classmethod
def electrum_header(cls, header, height):
h = dict(zip(cls.HEADER_VALUES, cls.HEADER_UNPACK(header)))
# Add the height that is not present in the header itself
h['block_height'] = height
# Convert bytes to str
h['prev_block_hash'] = hash_to_hex_str(h['prev_block_hash'])
h['merkle_root'] = hash_to_hex_str(h['merkle_root'])
return h
class LBC(Coin):
DAEMON = LBCDaemon
SESSIONCLS = LBRYElectrumX
SESSION_MANAGER = LBRYSessionManager
DESERIALIZER = DeserializerSegWit
DB = LevelDB
NAME = "LBRY"
SHORTNAME = "LBC"
NET = "mainnet"
BASIC_HEADER_SIZE = 112
CHUNK_SIZE = 96
XPUB_VERBYTES = bytes.fromhex("0488b21e")
XPRV_VERBYTES = bytes.fromhex("0488ade4")
P2PKH_VERBYTE = bytes.fromhex("55")
P2SH_VERBYTES = bytes.fromhex("7A")
WIF_BYTE = bytes.fromhex("1C")
GENESIS_HASH = ('9c89283ba0f3227f6c03b70216b9f665'
'f0118d5e0fa729cedf4fb34d6a34f463')
TX_COUNT = 2716936
TX_COUNT_HEIGHT = 329554
TX_PER_BLOCK = 1
RPC_PORT = 9245
REORG_LIMIT = 200
nOriginalClaimExpirationTime = 262974
nExtendedClaimExpirationTime = 2102400
nExtendedClaimExpirationForkHeight = 400155
nNormalizedNameForkHeight = 539940 # targeting 21 March 2019
nMinTakeoverWorkaroundHeight = 496850
nMaxTakeoverWorkaroundHeight = 658300 # targeting 30 Oct 2019
nWitnessForkHeight = 680770 # targeting 11 Dec 2019
nAllClaimsInMerkleForkHeight = 658310 # targeting 30 Oct 2019
proportionalDelayFactor = 32
maxTakeoverDelay = 4032
PEERS = [
]
@classmethod
def genesis_block(cls, block):
'''Check the Genesis block is the right one for this coin.
Return the block less its unspendable coinbase.
'''
header = cls.block_header(block, 0)
header_hex_hash = hash_to_hex_str(cls.header_hash(header))
if header_hex_hash != cls.GENESIS_HASH:
raise CoinError(f'genesis block has hash {header_hex_hash} expected {cls.GENESIS_HASH}')
return block
@classmethod
def electrum_header(cls, header, height):
version, = struct.unpack('<I', header[:4])
timestamp, bits, nonce = struct.unpack('<III', header[100:112])
return {
'version': version,
'prev_block_hash': hash_to_hex_str(header[4:36]),
'merkle_root': hash_to_hex_str(header[36:68]),
'claim_trie_root': hash_to_hex_str(header[68:100]),
'timestamp': timestamp,
'bits': bits,
'nonce': nonce,
'block_height': height,
}
@cachedproperty
def address_handlers(self):
return ScriptPubKey.PayToHandlers(
address=self.P2PKH_address_from_hash160,
script_hash=self.P2SH_address_from_hash160,
pubkey=self.P2PKH_address_from_pubkey,
unspendable=lambda: None,
strange=self.claim_address_handler,
)
@classmethod
def address_from_script(cls, script):
'''Given a pk_script, return the address it pays to, or None.'''
return ScriptPubKey.pay_to(cls.address_handlers, script)
@classmethod
def claim_address_handler(cls, script):
'''Parse a claim script, returns the address
'''
output = OutputScript(script)
if output.is_pay_pubkey_hash:
return cls.P2PKH_address_from_hash160(output.values['pubkey_hash'])
if output.is_pay_script_hash:
return cls.P2SH_address_from_hash160(output.values['script_hash'])
if output.is_pay_pubkey:
return cls.P2PKH_address_from_pubkey(output.values['pubkey'])
if output.is_return_data:
return None
return None
@classmethod
def hashX_from_script(cls, script):
'''
Overrides electrumx hashX from script by extracting addresses from claim scripts.
'''
if script and script[0] == OpCodes.OP_RETURN or not script:
return None
if script[0] in [
OP_CLAIM_NAME,
OP_UPDATE_CLAIM,
OP_SUPPORT_CLAIM,
]:
return cls.address_to_hashX(cls.claim_address_handler(script))
else:
return sha256(script).digest()[:HASHX_LEN]
@classmethod
def get_expiration_height(cls, last_updated_height: int, extended: bool = False) -> int:
if extended:
return last_updated_height + cls.nExtendedClaimExpirationTime
if last_updated_height < cls.nExtendedClaimExpirationForkHeight:
return last_updated_height + cls.nOriginalClaimExpirationTime
return last_updated_height + cls.nExtendedClaimExpirationTime
@classmethod
def get_delay_for_name(cls, blocks_of_continuous_ownership: int) -> int:
return min(blocks_of_continuous_ownership // cls.proportionalDelayFactor, cls.maxTakeoverDelay)
class LBCRegTest(LBC):
NET = "regtest"
GENESIS_HASH = '6e3fcf1299d4ec5d79c3a4c91d624a4acf9e2e173d95a1a0504f677669687556'
XPUB_VERBYTES = bytes.fromhex('043587cf')
XPRV_VERBYTES = bytes.fromhex('04358394')
P2PKH_VERBYTE = bytes.fromhex("6f")
P2SH_VERBYTES = bytes.fromhex("c4")
nOriginalClaimExpirationTime = 500
nExtendedClaimExpirationTime = 600
nExtendedClaimExpirationForkHeight = 800
nNormalizedNameForkHeight = 250
nMinTakeoverWorkaroundHeight = -1
nMaxTakeoverWorkaroundHeight = -1
nWitnessForkHeight = 150
nAllClaimsInMerkleForkHeight = 350
class LBCTestNet(LBCRegTest):
NET = "testnet"
GENESIS_HASH = '9c89283ba0f3227f6c03b70216b9f665f0118d5e0fa729cedf4fb34d6a34f463'
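For the mainnet parameters defined on LBC above, get_delay_for_name grants one block of takeover delay per 32 blocks of continuous ownership (proportionalDelayFactor) and caps it at 4032 blocks (maxTakeoverDelay). A quick check of the formula:

    def get_delay_for_name(blocks_of_continuous_ownership: int) -> int:
        # min(ownership // 32, 4032), as defined on LBC above
        return min(blocks_of_continuous_ownership // 32, 4032)

    assert get_delay_for_name(320) == 10          # 320 blocks owned -> 10 block delay
    assert get_delay_for_name(1_000_000) == 4032  # long ownership hits the cap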


@@ -1,375 +0,0 @@
import asyncio
import itertools
import json
import time
from functools import wraps
import aiohttp
from prometheus_client import Gauge, Histogram
from lbry.utils import LRUCacheWithMetrics
from lbry.wallet.rpc.jsonrpc import RPCError
from lbry.wallet.server.util import hex_to_bytes, class_logger
from lbry.wallet.rpc import JSONRPC
class DaemonError(Exception):
"""Raised when the daemon returns an error in its results."""
class WarmingUpError(Exception):
"""Internal - when the daemon is warming up."""
class WorkQueueFullError(Exception):
"""Internal - when the daemon's work queue is full."""
NAMESPACE = "wallet_server"
class Daemon:
"""Handles connections to a daemon at the given URL."""
WARMING_UP = -28
id_counter = itertools.count()
lbrycrd_request_time_metric = Histogram(
"lbrycrd_request", "lbrycrd requests count", namespace=NAMESPACE, labelnames=("method",)
)
lbrycrd_pending_count_metric = Gauge(
"lbrycrd_pending_count", "Number of lbrycrd rpcs that are in flight", namespace=NAMESPACE,
labelnames=("method",)
)
def __init__(self, coin, url, max_workqueue=10, init_retry=0.25,
max_retry=4.0):
self.coin = coin
self.logger = class_logger(__name__, self.__class__.__name__)
self.set_url(url)
# Limit concurrent RPC calls to this number.
# See DEFAULT_HTTP_WORKQUEUE in bitcoind, which is typically 16
self.workqueue_semaphore = asyncio.Semaphore(value=max_workqueue)
self.init_retry = init_retry
self.max_retry = max_retry
self._height = None
self.available_rpcs = {}
self.connector = aiohttp.TCPConnector()
self._block_hash_cache = LRUCacheWithMetrics(100000)
self._block_cache = LRUCacheWithMetrics(2 ** 13, metric_name='block', namespace=NAMESPACE)
async def close(self):
if self.connector:
await self.connector.close()
self.connector = None
def set_url(self, url):
"""Set the URLS to the given list, and switch to the first one."""
urls = url.split(',')
urls = [self.coin.sanitize_url(url) for url in urls]
for n, url in enumerate(urls):
status = '' if n else ' (current)'
logged_url = self.logged_url(url)
self.logger.info(f'daemon #{n + 1} at {logged_url}{status}')
self.url_index = 0
self.urls = urls
def current_url(self):
"""Returns the current daemon URL."""
return self.urls[self.url_index]
def logged_url(self, url=None):
"""The host and port part, for logging."""
url = url or self.current_url()
return url[url.rindex('@') + 1:]
def failover(self):
"""Call to fail-over to the next daemon URL.
Returns False if there is only one, otherwise True.
"""
if len(self.urls) > 1:
self.url_index = (self.url_index + 1) % len(self.urls)
self.logger.info(f'failing over to {self.logged_url()}')
return True
return False
def client_session(self):
"""An aiohttp client session."""
return aiohttp.ClientSession(connector=self.connector, connector_owner=False)
async def _send_data(self, data):
if not self.connector:
raise asyncio.CancelledError('Tried to send request during shutdown.')
async with self.workqueue_semaphore:
async with self.client_session() as session:
async with session.post(self.current_url(), data=data) as resp:
kind = resp.headers.get('Content-Type', None)
if kind == 'application/json':
return await resp.json()
# bitcoind's HTTP protocol "handling" is a bad joke
text = await resp.text()
if 'Work queue depth exceeded' in text:
raise WorkQueueFullError
text = text.strip() or resp.reason
self.logger.error(text)
raise DaemonError(text)
async def _send(self, payload, processor):
"""Send a payload to be converted to JSON.
Handles temporary connection issues. Daemon response errors
are raised through DaemonError.
"""
def log_error(error):
nonlocal last_error_log, retry
now = time.time()
if now - last_error_log > 60:
last_error_log = now
self.logger.error(f'{error} Retrying occasionally...')
if retry == self.max_retry and self.failover():
retry = 0
on_good_message = None
last_error_log = 0
data = json.dumps(payload)
retry = self.init_retry
methods = tuple(
[payload['method']] if isinstance(payload, dict) else [request['method'] for request in payload]
)
while True:
try:
for method in methods:
self.lbrycrd_pending_count_metric.labels(method=method).inc()
result = await self._send_data(data)
result = processor(result)
if on_good_message:
self.logger.info(on_good_message)
return result
except asyncio.TimeoutError:
log_error('timeout error.')
except aiohttp.ServerDisconnectedError:
log_error('disconnected.')
on_good_message = 'connection restored'
except aiohttp.ClientConnectionError:
log_error('connection problem - is your daemon running?')
on_good_message = 'connection restored'
except aiohttp.ClientError as e:
log_error(f'daemon error: {e}')
on_good_message = 'running normally'
except WarmingUpError:
log_error('starting up checking blocks.')
on_good_message = 'running normally'
except WorkQueueFullError:
log_error('work queue full.')
on_good_message = 'running normally'
finally:
for method in methods:
self.lbrycrd_pending_count_metric.labels(method=method).dec()
await asyncio.sleep(retry)
retry = max(min(self.max_retry, retry * 2), self.init_retry)
async def _send_single(self, method, params=None):
"""Send a single request to the daemon."""
start = time.perf_counter()
def processor(result):
err = result['error']
if not err:
return result['result']
if err.get('code') == self.WARMING_UP:
raise WarmingUpError
raise DaemonError(err)
payload = {'method': method, 'id': next(self.id_counter)}
if params:
payload['params'] = params
result = await self._send(payload, processor)
self.lbrycrd_request_time_metric.labels(method=method).observe(time.perf_counter() - start)
return result
async def _send_vector(self, method, params_iterable, replace_errs=False):
"""Send several requests of the same method.
The result will be an array of the same length as params_iterable.
If replace_errs is true, any item with an error is returned as None,
otherwise an exception is raised."""
start = time.perf_counter()
def processor(result):
errs = [item['error'] for item in result if item['error']]
if any(err.get('code') == self.WARMING_UP for err in errs):
raise WarmingUpError
if not errs or replace_errs:
return [item['result'] for item in result]
raise DaemonError(errs)
payload = [{'method': method, 'params': p, 'id': next(self.id_counter)}
for p in params_iterable]
result = []
if payload:
result = await self._send(payload, processor)
self.lbrycrd_request_time_metric.labels(method=method).observe(time.perf_counter() - start)
return result
async def _is_rpc_available(self, method):
"""Return whether given RPC method is available in the daemon.
Results are cached and the daemon will generally not be queried with
the same method more than once."""
available = self.available_rpcs.get(method)
if available is None:
available = True
try:
await self._send_single(method)
except DaemonError as e:
err = e.args[0]
error_code = err.get("code")
available = error_code != JSONRPC.METHOD_NOT_FOUND
self.available_rpcs[method] = available
return available
async def block_hex_hashes(self, first, count):
"""Return the hex hashes of count block starting at height first."""
if first + count < (self.cached_height() or 0) - 200:
return await self._cached_block_hex_hashes(first, count)
params_iterable = ((h, ) for h in range(first, first + count))
return await self._send_vector('getblockhash', params_iterable)
async def _cached_block_hex_hashes(self, first, count):
"""Return the hex hashes of count block starting at height first."""
cached = self._block_hash_cache.get((first, count))
if cached:
return cached
params_iterable = ((h, ) for h in range(first, first + count))
self._block_hash_cache[(first, count)] = await self._send_vector('getblockhash', params_iterable)
return self._block_hash_cache[(first, count)]
async def deserialised_block(self, hex_hash):
"""Return the deserialised block with the given hex hash."""
if hex_hash not in self._block_cache:
block = await self._send_single('getblock', (hex_hash, True))
self._block_cache[hex_hash] = block
return block
return self._block_cache[hex_hash]
async def raw_blocks(self, hex_hashes):
"""Return the raw binary blocks with the given hex hashes."""
params_iterable = ((h, False) for h in hex_hashes)
blocks = await self._send_vector('getblock', params_iterable)
# Convert hex string to bytes
return [hex_to_bytes(block) for block in blocks]
async def mempool_hashes(self):
"""Update our record of the daemon's mempool hashes."""
return await self._send_single('getrawmempool')
async def estimatefee(self, block_count):
"""Return the fee estimate for the block count. Units are whole
currency units per KB, e.g. 0.00000995, or -1 if no estimate
is available.
"""
args = (block_count, )
if await self._is_rpc_available('estimatesmartfee'):
estimate = await self._send_single('estimatesmartfee', args)
return estimate.get('feerate', -1)
return await self._send_single('estimatefee', args)
async def getnetworkinfo(self):
"""Return the result of the 'getnetworkinfo' RPC call."""
return await self._send_single('getnetworkinfo')
async def relayfee(self):
"""The minimum fee a low-priority tx must pay in order to be accepted
to the daemon's memory pool."""
network_info = await self.getnetworkinfo()
return network_info['relayfee']
async def getrawtransaction(self, hex_hash, verbose=False):
"""Return the serialized raw transaction with the given hash."""
# Cast to int because some coin daemons are old and require it
return await self._send_single('getrawtransaction',
(hex_hash, int(verbose)))
async def getrawtransactions(self, hex_hashes, replace_errs=True):
"""Return the serialized raw transactions with the given hashes.
Replaces errors with None by default."""
params_iterable = ((hex_hash, 0) for hex_hash in hex_hashes)
txs = await self._send_vector('getrawtransaction', params_iterable,
replace_errs=replace_errs)
# Convert hex strings to bytes
return [hex_to_bytes(tx) if tx else None for tx in txs]
async def broadcast_transaction(self, raw_tx):
"""Broadcast a transaction to the network."""
return await self._send_single('sendrawtransaction', (raw_tx, ))
async def height(self):
"""Query the daemon for its current height."""
self._height = await self._send_single('getblockcount')
return self._height
def cached_height(self):
"""Return the cached daemon height.
If the daemon has not been queried yet this returns None."""
return self._height
def handles_errors(decorated_function):
@wraps(decorated_function)
async def wrapper(*args, **kwargs):
try:
return await decorated_function(*args, **kwargs)
except DaemonError as daemon_error:
raise RPCError(1, daemon_error.args[0])
return wrapper
class LBCDaemon(Daemon):
@handles_errors
async def getrawtransaction(self, hex_hash, verbose=False):
return await super().getrawtransaction(hex_hash=hex_hash, verbose=verbose)
@handles_errors
async def getclaimbyid(self, claim_id):
'''Given a claim id, retrieves claim information.'''
return await self._send_single('getclaimbyid', (claim_id,))
@handles_errors
async def getclaimsbyids(self, claim_ids):
'''Given a list of claim ids, batches calls to retrieve claim information.'''
return await self._send_vector('getclaimbyid', ((claim_id,) for claim_id in claim_ids))
@handles_errors
async def getclaimsforname(self, name):
'''Given a name, retrieves all claims matching that name.'''
return await self._send_single('getclaimsforname', (name,))
@handles_errors
async def getclaimsfortx(self, txid):
'''Given a txid, returns the claims it makes.'''
return await self._send_single('getclaimsfortx', (txid,)) or []
@handles_errors
async def getnameproof(self, name, block_hash=None):
'''Given a name and optional block_hash, returns a name proof and winner, if any.'''
return await self._send_single('getnameproof', (name, block_hash,) if block_hash else (name,))
@handles_errors
async def getvalueforname(self, name):
'''Given a name, returns the winning claim value.'''
return await self._send_single('getvalueforname', (name,))
@handles_errors
async def getnamesintrie(self):
'''Returns the names in the claim trie.'''
return await self._send_single('getnamesintrie')
@handles_errors
async def claimname(self, name, hexvalue, amount):
'''Claim a name, used for functional tests only.'''
return await self._send_single('claimname', (name, hexvalue, float(amount)))
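_send sleeps between retries on a doubling schedule clamped to [init_retry, max_retry], so with the defaults above the delays run 0.25s, 0.5s, 1s, 2s, then hold at 4s; once retry reaches max_retry, a successful failover to the next daemon URL resets it. The schedule in isolation (failover omitted):

    def backoff_schedule(init_retry=0.25, max_retry=4.0, attempts=6):
        delays, retry = [], init_retry
        for _ in range(attempts):
            delays.append(retry)
            retry = max(min(max_retry, retry * 2), init_retry)
        return delays

    assert backoff_schedule() == [0.25, 0.5, 1.0, 2.0, 4.0, 4.0]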


@@ -1,42 +0,0 @@
import enum
@enum.unique
class DB_PREFIXES(enum.Enum):
claim_to_support = b'K'
support_to_claim = b'L'
claim_to_txo = b'E'
txo_to_claim = b'G'
claim_to_channel = b'I'
channel_to_claim = b'J'
claim_short_id_prefix = b'F'
effective_amount = b'D'
claim_expiration = b'O'
claim_takeover = b'P'
pending_activation = b'Q'
activated_claim_and_support = b'R'
active_amount = b'S'
repost = b'V'
reposted_claim = b'W'
undo = b'M'
claim_diff = b'Y'
tx = b'B'
block_hash = b'C'
header = b'H'
tx_num = b'N'
tx_count = b'T'
tx_hash = b'X'
utxo = b'u'
hashx_utxo = b'h'
hashx_history = b'x'
db_state = b's'
channel_count = b'Z'
support_amount = b'a'
block_txs = b'b'


@@ -1,447 +0,0 @@
import typing
CLAIM_TYPES = {
'stream': 1,
'channel': 2,
'repost': 3,
'collection': 4,
}
STREAM_TYPES = {
'video': 1,
'audio': 2,
'image': 3,
'document': 4,
'binary': 5,
'model': 6,
}
# 9/21/2020
MOST_USED_TAGS = {
"gaming",
"people & blogs",
"entertainment",
"music",
"pop culture",
"education",
"technology",
"blockchain",
"news",
"funny",
"science & technology",
"learning",
"gameplay",
"news & politics",
"comedy",
"bitcoin",
"beliefs",
"nature",
"art",
"economics",
"film & animation",
"lets play",
"games",
"sports",
"howto & style",
"game",
"cryptocurrency",
"playstation 4",
"automotive",
"crypto",
"mature",
"sony interactive entertainment",
"walkthrough",
"tutorial",
"video game",
"weapons",
"playthrough",
"pc",
"anime",
"how to",
"btc",
"fun",
"ethereum",
"food",
"travel & events",
"minecraft",
"science",
"autos & vehicles",
"play",
"politics",
"commentary",
"twitch",
"ps4live",
"love",
"ps4",
"nonprofits & activism",
"ps4share",
"fortnite",
"xbox",
"porn",
"video games",
"trump",
"español",
"money",
"music video",
"nintendo",
"movie",
"coronavirus",
"donald trump",
"steam",
"trailer",
"android",
"podcast",
"xbox one",
"survival",
"audio",
"linux",
"travel",
"funny moments",
"litecoin",
"animation",
"gamer",
"lets",
"playstation",
"bitcoin news",
"history",
"xxx",
"fox news",
"dance",
"god",
"adventure",
"liberal",
"2020",
"horror",
"government",
"freedom",
"reaction",
"meme",
"photography",
"truth",
"health",
"lbry",
"family",
"online",
"eth",
"crypto news",
"diy",
"trading",
"gold",
"memes",
"world",
"space",
"lol",
"covid-19",
"rpg",
"humor",
"democrat",
"film",
"call of duty",
"tech",
"religion",
"conspiracy",
"rap",
"cnn",
"hangoutsonair",
"unboxing",
"fiction",
"conservative",
"cars",
"hoa",
"epic",
"programming",
"progressive",
"cryptocurrency news",
"classical",
"jesus",
"movies",
"book",
"ps3",
"republican",
"fitness",
"books",
"multiplayer",
"animals",
"pokemon",
"bitcoin price",
"facebook",
"sharefactory",
"criptomonedas",
"cod",
"bible",
"business",
"stream",
"comics",
"how",
"fail",
"nsfw",
"new music",
"satire",
"pets & animals",
"computer",
"classical music",
"indie",
"musica",
"msnbc",
"fps",
"mod",
"sport",
"sony",
"ripple",
"auto",
"rock",
"marvel",
"complete",
"mining",
"political",
"mobile",
"pubg",
"hip hop",
"flat earth",
"xbox 360",
"reviews",
"vlogging",
"latest news",
"hack",
"tarot",
"iphone",
"media",
"cute",
"christian",
"free speech",
"trap",
"war",
"remix",
"ios",
"xrp",
"spirituality",
"song",
"league of legends",
"cat"
}
MATURE_TAGS = [
'nsfw', 'porn', 'xxx', 'mature', 'adult', 'sex'
]
def normalize_tag(tag):
return tag.replace(" ", "_").replace("&", "and").replace("-", "_")
COMMON_TAGS = {
tag: normalize_tag(tag) for tag in list(MOST_USED_TAGS)
}
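normalize_tag turns display tags into the identifier-style keys used by COMMON_TAGS, for example:

    def normalize_tag(tag):
        return tag.replace(" ", "_").replace("&", "and").replace("-", "_")

    assert normalize_tag("science & technology") == "science_and_technology"
    assert normalize_tag("covid-19") == "covid_19"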
INDEXED_LANGUAGES = [
'none',
'en',
'aa',
'ab',
'ae',
'af',
'ak',
'am',
'an',
'ar',
'as',
'av',
'ay',
'az',
'ba',
'be',
'bg',
'bh',
'bi',
'bm',
'bn',
'bo',
'br',
'bs',
'ca',
'ce',
'ch',
'co',
'cr',
'cs',
'cu',
'cv',
'cy',
'da',
'de',
'dv',
'dz',
'ee',
'el',
'eo',
'es',
'et',
'eu',
'fa',
'ff',
'fi',
'fj',
'fo',
'fr',
'fy',
'ga',
'gd',
'gl',
'gn',
'gu',
'gv',
'ha',
'he',
'hi',
'ho',
'hr',
'ht',
'hu',
'hy',
'hz',
'ia',
'id',
'ie',
'ig',
'ii',
'ik',
'io',
'is',
'it',
'iu',
'ja',
'jv',
'ka',
'kg',
'ki',
'kj',
'kk',
'kl',
'km',
'kn',
'ko',
'kr',
'ks',
'ku',
'kv',
'kw',
'ky',
'la',
'lb',
'lg',
'li',
'ln',
'lo',
'lt',
'lu',
'lv',
'mg',
'mh',
'mi',
'mk',
'ml',
'mn',
'mr',
'ms',
'mt',
'my',
'na',
'nb',
'nd',
'ne',
'ng',
'nl',
'nn',
'no',
'nr',
'nv',
'ny',
'oc',
'oj',
'om',
'or',
'os',
'pa',
'pi',
'pl',
'ps',
'pt',
'qu',
'rm',
'rn',
'ro',
'ru',
'rw',
'sa',
'sc',
'sd',
'se',
'sg',
'si',
'sk',
'sl',
'sm',
'sn',
'so',
'sq',
'sr',
'ss',
'st',
'su',
'sv',
'sw',
'ta',
'te',
'tg',
'th',
'ti',
'tk',
'tl',
'tn',
'to',
'tr',
'ts',
'tt',
'tw',
'ty',
'ug',
'uk',
'ur',
'uz',
've',
'vi',
'vo',
'wa',
'wo',
'xh',
'yi',
'yo',
'za',
'zh',
'zu'
]
class ResolveResult(typing.NamedTuple):
name: str
normalized_name: str
claim_hash: bytes
tx_num: int
position: int
tx_hash: bytes
height: int
amount: int
short_url: str
is_controlling: bool
canonical_url: str
creation_height: int
activation_height: int
expiration_height: int
effective_amount: int
support_amount: int
reposted: int
last_takeover_height: typing.Optional[int]
claims_in_channel: typing.Optional[int]
channel_hash: typing.Optional[bytes]
reposted_claim_hash: typing.Optional[bytes]
signature_valid: typing.Optional[bool]


@@ -1,119 +0,0 @@
import struct
from typing import Optional
from lbry.wallet.server.db import DB_PREFIXES
from lbry.wallet.server.db.revertable import RevertableOpStack, RevertablePut, RevertableDelete
class KeyValueStorage:
def get(self, key: bytes, fill_cache: bool = True) -> Optional[bytes]:
raise NotImplementedError()
def iterator(self, reverse=False, start=None, stop=None, include_start=True, include_stop=False, prefix=None,
include_key=True, include_value=True, fill_cache=True):
raise NotImplementedError()
def write_batch(self, transaction: bool = False):
raise NotImplementedError()
def close(self):
raise NotImplementedError()
@property
def closed(self) -> bool:
raise NotImplementedError()
class PrefixDB:
UNDO_KEY_STRUCT = struct.Struct(b'>Q')
def __init__(self, db: KeyValueStorage, max_undo_depth: int = 200, unsafe_prefixes=None):
self._db = db
self._op_stack = RevertableOpStack(db.get, unsafe_prefixes=unsafe_prefixes)
self._max_undo_depth = max_undo_depth
def unsafe_commit(self):
"""
Write staged changes to the database without keeping undo information
Changes written cannot be undone
"""
try:
with self._db.write_batch(transaction=True) as batch:
batch_put = batch.put
batch_delete = batch.delete
for staged_change in self._op_stack:
if staged_change.is_put:
batch_put(staged_change.key, staged_change.value)
else:
batch_delete(staged_change.key)
finally:
self._op_stack.clear()
def commit(self, height: int):
"""
Write changes for a block height to the database and keep undo information so that the changes can be reverted
"""
undo_ops = self._op_stack.get_undo_ops()
delete_undos = []
if height > self._max_undo_depth:
delete_undos.extend(self._db.iterator(
start=DB_PREFIXES.undo.value + self.UNDO_KEY_STRUCT.pack(0),
stop=DB_PREFIXES.undo.value + self.UNDO_KEY_STRUCT.pack(height - self._max_undo_depth),
include_value=False
))
try:
with self._db.write_batch(transaction=True) as batch:
batch_put = batch.put
batch_delete = batch.delete
for staged_change in self._op_stack:
if staged_change.is_put:
batch_put(staged_change.key, staged_change.value)
else:
batch_delete(staged_change.key)
for undo_to_delete in delete_undos:
batch_delete(undo_to_delete)
batch_put(DB_PREFIXES.undo.value + self.UNDO_KEY_STRUCT.pack(height), undo_ops)
finally:
self._op_stack.clear()
def rollback(self, height: int):
"""
Revert changes for a block height
"""
undo_key = DB_PREFIXES.undo.value + self.UNDO_KEY_STRUCT.pack(height)
self._op_stack.apply_packed_undo_ops(self._db.get(undo_key))
try:
with self._db.write_batch(transaction=True) as batch:
batch_put = batch.put
batch_delete = batch.delete
for staged_change in self._op_stack:
if staged_change.is_put:
batch_put(staged_change.key, staged_change.value)
else:
batch_delete(staged_change.key)
batch_delete(undo_key)
finally:
self._op_stack.clear()
def get(self, key: bytes, fill_cache: bool = True) -> Optional[bytes]:
return self._db.get(key, fill_cache=fill_cache)
def iterator(self, reverse=False, start=None, stop=None, include_start=True, include_stop=False, prefix=None,
include_key=True, include_value=True, fill_cache=True):
return self._db.iterator(
reverse=reverse, start=start, stop=stop, include_start=include_start, include_stop=include_stop,
prefix=prefix, include_key=include_key, include_value=include_value, fill_cache=fill_cache
)
def close(self):
if not self._db.closed:
self._db.close()
@property
def closed(self):
return self._db.closed
def stage_raw_put(self, key: bytes, value: bytes):
self._op_stack.append_op(RevertablePut(key, value))
def stage_raw_delete(self, key: bytes, value: bytes):
self._op_stack.append_op(RevertableDelete(key, value))
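commit() above persists the staged operations atomically and stores their packed inverse under an undo record for the block height, pruning records older than max_undo_depth (200 by default); rollback() replays that inverse. The undo key is simply the undo prefix followed by a big-endian 64-bit height, using the b'M' value from the DB_PREFIXES enum earlier:

    import struct

    UNDO_PREFIX = b'M'                       # DB_PREFIXES.undo
    UNDO_KEY_STRUCT = struct.Struct(b'>Q')   # big-endian uint64 height

    def undo_key(height: int) -> bytes:
        return UNDO_PREFIX + UNDO_KEY_STRUCT.pack(height)

    assert undo_key(1000) == b'M' + (1000).to_bytes(8, 'big')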


@@ -1 +0,0 @@
from .search import SearchIndex


@@ -1,100 +0,0 @@
INDEX_DEFAULT_SETTINGS = {
"settings":
{"analysis":
{"analyzer": {
"default": {"tokenizer": "whitespace", "filter": ["lowercase", "porter_stem"]}}},
"index":
{"refresh_interval": -1,
"number_of_shards": 1,
"number_of_replicas": 0,
"sort": {
"field": ["trending_score", "release_time"],
"order": ["desc", "desc"]
}}
},
"mappings": {
"properties": {
"claim_id": {
"fields": {
"keyword": {
"ignore_above": 256,
"type": "keyword"
}
},
"type": "text",
"index_prefixes": {
"min_chars": 1,
"max_chars": 10
}
},
"sd_hash": {
"fields": {
"keyword": {
"ignore_above": 96,
"type": "keyword"
}
},
"type": "text",
"index_prefixes": {
"min_chars": 1,
"max_chars": 4
}
},
"height": {"type": "integer"},
"claim_type": {"type": "byte"},
"censor_type": {"type": "byte"},
"trending_score": {"type": "double"},
"release_time": {"type": "long"}
}
}
}
FIELDS = {
'_id',
'claim_id', 'claim_type', 'claim_name', 'normalized_name',
'tx_id', 'tx_nout', 'tx_position',
'short_url', 'canonical_url',
'is_controlling', 'last_take_over_height',
'public_key_bytes', 'public_key_id', 'claims_in_channel',
'channel_id', 'signature', 'signature_digest', 'is_signature_valid',
'amount', 'effective_amount', 'support_amount',
'fee_amount', 'fee_currency',
'height', 'creation_height', 'activation_height', 'expiration_height',
'stream_type', 'media_type', 'censor_type',
'title', 'author', 'description',
'timestamp', 'creation_timestamp',
'duration', 'release_time',
'tags', 'languages', 'has_source', 'reposted_claim_type',
'reposted_claim_id', 'repost_count', 'sd_hash',
'trending_score', 'tx_num'
}
TEXT_FIELDS = {'author', 'canonical_url', 'channel_id', 'description', 'claim_id', 'censoring_channel_id',
'media_type', 'normalized_name', 'public_key_bytes', 'public_key_id', 'short_url', 'signature',
'claim_name', 'signature_digest', 'title', 'tx_id', 'fee_currency', 'reposted_claim_id',
'tags', 'sd_hash'}
RANGE_FIELDS = {
'height', 'creation_height', 'activation_height', 'expiration_height',
'timestamp', 'creation_timestamp', 'duration', 'release_time', 'fee_amount',
'tx_position', 'repost_count', 'limit_claims_per_channel',
'amount', 'effective_amount', 'support_amount',
'trending_score', 'censor_type', 'tx_num'
}
ALL_FIELDS = RANGE_FIELDS | TEXT_FIELDS | FIELDS
REPLACEMENTS = {
'claim_name': 'normalized_name',
'name': 'normalized_name',
'txid': 'tx_id',
'nout': 'tx_nout',
'trending_group': 'trending_score',
'trending_mixed': 'trending_score',
'trending_global': 'trending_score',
'trending_local': 'trending_score',
'reposted': 'repost_count',
'stream_types': 'stream_type',
'media_types': 'media_type',
'valid_channel_signature': 'is_signature_valid'
}


@@ -1,726 +0,0 @@
import time
import asyncio
import struct
from binascii import unhexlify
from collections import Counter, deque
from decimal import Decimal
from operator import itemgetter
from typing import Optional, List, Iterable, Union
from elasticsearch import AsyncElasticsearch, NotFoundError, ConnectionError
from elasticsearch.helpers import async_streaming_bulk
from lbry.error import ResolveCensoredError, TooManyClaimSearchParametersError
from lbry.schema.result import Outputs, Censor
from lbry.schema.tags import clean_tags
from lbry.schema.url import URL, normalize_name
from lbry.utils import LRUCache
from lbry.wallet.server.db.common import CLAIM_TYPES, STREAM_TYPES
from lbry.wallet.server.db.elasticsearch.constants import INDEX_DEFAULT_SETTINGS, REPLACEMENTS, FIELDS, TEXT_FIELDS, \
RANGE_FIELDS, ALL_FIELDS
from lbry.wallet.server.util import class_logger
from lbry.wallet.server.db.common import ResolveResult
class ChannelResolution(str):
@classmethod
def lookup_error(cls, url):
return LookupError(f'Could not find channel in "{url}".')
class StreamResolution(str):
@classmethod
def lookup_error(cls, url):
return LookupError(f'Could not find claim at "{url}".')
class IndexVersionMismatch(Exception):
def __init__(self, got_version, expected_version):
self.got_version = got_version
self.expected_version = expected_version
class SearchIndex:
VERSION = 1
def __init__(self, index_prefix: str, search_timeout=3.0, elastic_host='localhost', elastic_port=9200):
self.search_timeout = search_timeout
self.sync_timeout = 600 # won't hit that 99% of the time, but can hit on a fresh import
self.search_client: Optional[AsyncElasticsearch] = None
self.sync_client: Optional[AsyncElasticsearch] = None
self.index = index_prefix + 'claims'
self.logger = class_logger(__name__, self.__class__.__name__)
self.claim_cache = LRUCache(2 ** 15)
self.search_cache = LRUCache(2 ** 17)
self._elastic_host = elastic_host
self._elastic_port = elastic_port
async def get_index_version(self) -> int:
try:
template = await self.sync_client.indices.get_template(self.index)
return template[self.index]['version']
except NotFoundError:
return 0
async def set_index_version(self, version):
await self.sync_client.indices.put_template(
self.index, body={'version': version, 'index_patterns': ['ignored']}, ignore=400
)
async def start(self) -> bool:
if self.sync_client:
return False
hosts = [{'host': self._elastic_host, 'port': self._elastic_port}]
self.sync_client = AsyncElasticsearch(hosts, timeout=self.sync_timeout)
self.search_client = AsyncElasticsearch(hosts, timeout=self.search_timeout)
while True:
try:
await self.sync_client.cluster.health(wait_for_status='yellow')
break
except ConnectionError:
self.logger.warning("Failed to connect to Elasticsearch. Waiting for it!")
await asyncio.sleep(1)
res = await self.sync_client.indices.create(self.index, INDEX_DEFAULT_SETTINGS, ignore=400)
acked = res.get('acknowledged', False)
if acked:
await self.set_index_version(self.VERSION)
return acked
index_version = await self.get_index_version()
if index_version != self.VERSION:
self.logger.error("es search index has an incompatible version: %s vs %s", index_version, self.VERSION)
raise IndexVersionMismatch(index_version, self.VERSION)
await self.sync_client.indices.refresh(self.index)
return acked
def stop(self):
clients = [self.sync_client, self.search_client]
self.sync_client, self.search_client = None, None
return asyncio.ensure_future(asyncio.gather(*(client.close() for client in clients)))
def delete_index(self):
return self.sync_client.indices.delete(self.index, ignore_unavailable=True)
async def _consume_claim_producer(self, claim_producer):
count = 0
async for op, doc in claim_producer:
if op == 'delete':
yield {
'_index': self.index,
'_op_type': 'delete',
'_id': doc
}
else:
yield {
'doc': {key: value for key, value in doc.items() if key in ALL_FIELDS},
'_id': doc['claim_id'],
'_index': self.index,
'_op_type': 'update',
'doc_as_upsert': True
}
count += 1
if count % 100 == 0:
self.logger.info("Indexing in progress, %d claims.", count)
if count:
self.logger.info("Indexing done for %d claims.", count)
else:
self.logger.debug("Indexing done for %d claims.", count)
async def claim_consumer(self, claim_producer):
touched = set()
async for ok, item in async_streaming_bulk(self.sync_client, self._consume_claim_producer(claim_producer),
raise_on_error=False):
if not ok:
self.logger.warning("indexing failed for an item: %s", item)
else:
item = item.popitem()[1]
touched.add(item['_id'])
await self.sync_client.indices.refresh(self.index)
self.logger.debug("Indexing done.")
def update_filter_query(self, censor_type, blockdict, channels=False):
blockdict = {blocked.hex(): blocker.hex() for blocked, blocker in blockdict.items()}
if channels:
update = expand_query(channel_id__in=list(blockdict.keys()), censor_type=f"<{censor_type}")
else:
update = expand_query(claim_id__in=list(blockdict.keys()), censor_type=f"<{censor_type}")
key = 'channel_id' if channels else 'claim_id'
update['script'] = {
"source": f"ctx._source.censor_type={censor_type}; "
f"ctx._source.censoring_channel_id=params[ctx._source.{key}];",
"lang": "painless",
"params": blockdict
}
return update
async def update_trending_score(self, params):
update_trending_score_script = """
double softenLBC(double lbc) { return (Math.pow(lbc, 1.0 / 3.0)); }
double logsumexp(double x, double y)
{
double top;
if(x > y)
top = x;
else
top = y;
double result = top + Math.log(Math.exp(x-top) + Math.exp(y-top));
return(result);
}
double logdiffexp(double big, double small)
{
return big + Math.log(1.0 - Math.exp(small - big));
}
double squash(double x)
{
if(x < 0.0)
return -Math.log(1.0 - x);
else
return Math.log(x + 1.0);
}
double unsquash(double x)
{
if(x < 0.0)
return 1.0 - Math.exp(-x);
else
return Math.exp(x) - 1.0;
}
double log_to_squash(double x)
{
return logsumexp(x, 0.0);
}
double squash_to_log(double x)
{
//assert x > 0.0;
return logdiffexp(x, 0.0);
}
double squashed_add(double x, double y)
{
// squash(unsquash(x) + unsquash(y)) but avoiding overflow.
// Cases where the signs are the same
if (x < 0.0 && y < 0.0)
return -logsumexp(-x, logdiffexp(-y, 0.0));
if (x >= 0.0 && y >= 0.0)
return logsumexp(x, logdiffexp(y, 0.0));
// Where the signs differ
if (x >= 0.0 && y < 0.0)
if (Math.abs(x) >= Math.abs(y))
return logsumexp(0.0, logdiffexp(x, -y));
else
return -logsumexp(0.0, logdiffexp(-y, x));
if (x < 0.0 && y >= 0.0)
{
// Addition is commutative, hooray for new math
return squashed_add(y, x);
}
return 0.0;
}
double squashed_multiply(double x, double y)
{
// squash(unsquash(x)*unsquash(y)) but avoiding overflow.
int sign;
if(x*y >= 0.0)
sign = 1;
else
sign = -1;
return sign*logsumexp(squash_to_log(Math.abs(x))
+ squash_to_log(Math.abs(y)), 0.0);
}
// Squashed inflated units
double inflateUnits(int height) {
double timescale = 576.0; // Half life of 400 = e-folding time of a day
// by coincidence, so may as well go with it
return log_to_squash(height / timescale);
}
double spikePower(double newAmount) {
if (newAmount < 50.0) {
return(0.5);
} else if (newAmount < 85.0) {
return(newAmount / 100.0);
} else {
return(0.85);
}
}
double spikeMass(double oldAmount, double newAmount) {
double softenedChange = softenLBC(Math.abs(newAmount - oldAmount));
double changeInSoftened = Math.abs(softenLBC(newAmount) - softenLBC(oldAmount));
double power = spikePower(newAmount);
if (oldAmount > newAmount) {
return -1.0 * Math.pow(changeInSoftened, power) * Math.pow(softenedChange, 1.0 - power);
} else {
return Math.pow(changeInSoftened, power) * Math.pow(softenedChange, 1.0 - power);
}
}
for (i in params.src.changes) {
double units = inflateUnits(i.height);
if (ctx._source.trending_score == null) {
ctx._source.trending_score = 0.0;
}
double bigSpike = squashed_multiply(units, squash(spikeMass(i.prev_amount, i.new_amount)));
ctx._source.trending_score = squashed_add(ctx._source.trending_score, bigSpike);
}
"""
start = time.perf_counter()
def producer():
for claim_id, claim_updates in params.items():
yield {
'_id': claim_id,
'_index': self.index,
'_op_type': 'update',
'script': {
'lang': 'painless',
'source': update_trending_score_script,
'params': {'src': {
'changes': [
{
'height': p.height,
'prev_amount': p.prev_amount / 1E8,
'new_amount': p.new_amount / 1E8,
} for p in claim_updates
]
}}
},
}
if not params:
return
async for ok, item in async_streaming_bulk(self.sync_client, producer(), raise_on_error=False):
if not ok:
self.logger.warning("updating trending failed for an item: %s", item)
await self.sync_client.indices.refresh(self.index)
self.logger.info("updated trending scores in %ims", int((time.perf_counter() - start) * 1000))
async def apply_filters(self, blocked_streams, blocked_channels, filtered_streams, filtered_channels):
if filtered_streams:
await self.sync_client.update_by_query(
self.index, body=self.update_filter_query(Censor.SEARCH, filtered_streams), slices=4)
await self.sync_client.indices.refresh(self.index)
if filtered_channels:
await self.sync_client.update_by_query(
self.index, body=self.update_filter_query(Censor.SEARCH, filtered_channels), slices=4)
await self.sync_client.indices.refresh(self.index)
await self.sync_client.update_by_query(
self.index, body=self.update_filter_query(Censor.SEARCH, filtered_channels, True), slices=4)
await self.sync_client.indices.refresh(self.index)
if blocked_streams:
await self.sync_client.update_by_query(
self.index, body=self.update_filter_query(Censor.RESOLVE, blocked_streams), slices=4)
await self.sync_client.indices.refresh(self.index)
if blocked_channels:
await self.sync_client.update_by_query(
self.index, body=self.update_filter_query(Censor.RESOLVE, blocked_channels), slices=4)
await self.sync_client.indices.refresh(self.index)
await self.sync_client.update_by_query(
self.index, body=self.update_filter_query(Censor.RESOLVE, blocked_channels, True), slices=4)
await self.sync_client.indices.refresh(self.index)
self.clear_caches()
def clear_caches(self):
self.search_cache.clear()
self.claim_cache.clear()
async def cached_search(self, kwargs):
total_referenced = []
cache_item = ResultCacheItem.from_cache(str(kwargs), self.search_cache)
if cache_item.result is not None:
return cache_item.result
async with cache_item.lock:
if cache_item.result:
return cache_item.result
censor = Censor(Censor.SEARCH)
if kwargs.get('no_totals'):
response, offset, total = await self.search(**kwargs, censor_type=Censor.NOT_CENSORED)
else:
response, offset, total = await self.search(**kwargs)
censor.apply(response)
total_referenced.extend(response)
if censor.censored:
response, _, _ = await self.search(**kwargs, censor_type=Censor.NOT_CENSORED)
total_referenced.extend(response)
response = [
ResolveResult(
name=r['claim_name'],
normalized_name=r['normalized_name'],
claim_hash=r['claim_hash'],
tx_num=r['tx_num'],
position=r['tx_nout'],
tx_hash=r['tx_hash'],
height=r['height'],
amount=r['amount'],
short_url=r['short_url'],
is_controlling=r['is_controlling'],
canonical_url=r['canonical_url'],
creation_height=r['creation_height'],
activation_height=r['activation_height'],
expiration_height=r['expiration_height'],
effective_amount=r['effective_amount'],
support_amount=r['support_amount'],
last_takeover_height=r['last_take_over_height'],
claims_in_channel=r['claims_in_channel'],
channel_hash=r['channel_hash'],
reposted_claim_hash=r['reposted_claim_hash'],
reposted=r['reposted'],
signature_valid=r['signature_valid']
) for r in response
]
extra = [
ResolveResult(
name=r['claim_name'],
normalized_name=r['normalized_name'],
claim_hash=r['claim_hash'],
tx_num=r['tx_num'],
position=r['tx_nout'],
tx_hash=r['tx_hash'],
height=r['height'],
amount=r['amount'],
short_url=r['short_url'],
is_controlling=r['is_controlling'],
canonical_url=r['canonical_url'],
creation_height=r['creation_height'],
activation_height=r['activation_height'],
expiration_height=r['expiration_height'],
effective_amount=r['effective_amount'],
support_amount=r['support_amount'],
last_takeover_height=r['last_take_over_height'],
claims_in_channel=r['claims_in_channel'],
channel_hash=r['channel_hash'],
reposted_claim_hash=r['reposted_claim_hash'],
reposted=r['reposted'],
signature_valid=r['signature_valid']
) for r in await self._get_referenced_rows(total_referenced)
]
result = Outputs.to_base64(
response, extra, offset, total, censor
)
cache_item.result = result
return result
async def get_many(self, *claim_ids):
await self.populate_claim_cache(*claim_ids)
return filter(None, map(self.claim_cache.get, claim_ids))
async def populate_claim_cache(self, *claim_ids):
missing = [claim_id for claim_id in claim_ids if self.claim_cache.get(claim_id) is None]
if missing:
results = await self.search_client.mget(
index=self.index, body={"ids": missing}
)
for result in expand_result(filter(lambda doc: doc['found'], results["docs"])):
self.claim_cache.set(result['claim_id'], result)
async def search(self, **kwargs):
try:
return await self.search_ahead(**kwargs)
except NotFoundError:
return [], 0, 0
# return expand_result(result['hits']), 0, result.get('total', {}).get('value', 0)
async def search_ahead(self, **kwargs):
# 'limit_claims_per_channel' case. Fetch 1000 results, reorder, slice, inflate and return
per_channel_per_page = kwargs.pop('limit_claims_per_channel', 0) or 0
remove_duplicates = kwargs.pop('remove_duplicates', False)
page_size = kwargs.pop('limit', 10)
offset = kwargs.pop('offset', 0)
kwargs['limit'] = 1000
cache_item = ResultCacheItem.from_cache(f"ahead{per_channel_per_page}{kwargs}", self.search_cache)
if cache_item.result is not None:
reordered_hits = cache_item.result
else:
async with cache_item.lock:
if cache_item.result:
reordered_hits = cache_item.result
else:
query = expand_query(**kwargs)
search_hits = deque((await self.search_client.search(
query, index=self.index, track_total_hits=False,
_source_includes=['_id', 'channel_id', 'reposted_claim_id', 'creation_height']
))['hits']['hits'])
if remove_duplicates:
search_hits = self.__remove_duplicates(search_hits)
if per_channel_per_page > 0:
reordered_hits = self.__search_ahead(search_hits, page_size, per_channel_per_page)
else:
reordered_hits = [(hit['_id'], hit['_source']['channel_id']) for hit in search_hits]
cache_item.result = reordered_hits
result = list(await self.get_many(*(claim_id for claim_id, _ in reordered_hits[offset:(offset + page_size)])))
return result, 0, len(reordered_hits)
def __remove_duplicates(self, search_hits: deque) -> deque:
known_ids = {} # claim_id -> (creation_height, hit_id), where hit_id is either reposted claim id or original
dropped = set()
for hit in search_hits:
hit_height, hit_id = hit['_source']['creation_height'], hit['_source']['reposted_claim_id'] or hit['_id']
if hit_id not in known_ids:
known_ids[hit_id] = (hit_height, hit['_id'])
else:
previous_height, previous_id = known_ids[hit_id]
if hit_height < previous_height:
known_ids[hit_id] = (hit_height, hit['_id'])
dropped.add(previous_id)
else:
dropped.add(hit['_id'])
return deque(hit for hit in search_hits if hit['_id'] not in dropped)
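# Illustrative sketch, not part of the original file: given a claim A with
# creation_height 10, a repost R1 of A at height 20 and a repost R2 of A at
# height 5, all three hits share hit_id A; the hit with the lowest
# creation_height (R2) is kept and the other two are dropped.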
def __search_ahead(self, search_hits: list, page_size: int, per_channel_per_page: int):
reordered_hits = []
channel_counters = Counter()
next_page_hits_maybe_check_later = deque()
while search_hits or next_page_hits_maybe_check_later:
if reordered_hits and len(reordered_hits) % page_size == 0:
channel_counters.clear()
elif not reordered_hits:
pass
else:
break # means last page was incomplete and we are left with bad replacements
for _ in range(len(next_page_hits_maybe_check_later)):
claim_id, channel_id = next_page_hits_maybe_check_later.popleft()
if per_channel_per_page > 0 and channel_counters[channel_id] < per_channel_per_page:
reordered_hits.append((claim_id, channel_id))
channel_counters[channel_id] += 1
else:
next_page_hits_maybe_check_later.append((claim_id, channel_id))
while search_hits:
hit = search_hits.popleft()
hit_id, hit_channel_id = hit['_id'], hit['_source']['channel_id']
if hit_channel_id is None or per_channel_per_page <= 0:
reordered_hits.append((hit_id, hit_channel_id))
elif channel_counters[hit_channel_id] < per_channel_per_page:
reordered_hits.append((hit_id, hit_channel_id))
channel_counters[hit_channel_id] += 1
if len(reordered_hits) % page_size == 0:
break
else:
next_page_hits_maybe_check_later.append((hit_id, hit_channel_id))
return reordered_hits
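# Illustrative sketch, not part of the original file: with page_size=2 and
# per_channel_per_page=1, hits [(h1, chA), (h2, chA), (h3, chB)] are reordered
# so that each page carries at most one claim per channel: page one becomes
# [h1, h3], and h2 is deferred into the next page via
# next_page_hits_maybe_check_later.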
async def _get_referenced_rows(self, txo_rows: List[dict]):
txo_rows = [row for row in txo_rows if isinstance(row, dict)]
referenced_ids = set(filter(None, map(itemgetter('reposted_claim_id'), txo_rows)))
referenced_ids |= set(filter(None, (row['channel_id'] for row in txo_rows)))
referenced_ids |= set(filter(None, (row['censoring_channel_id'] for row in txo_rows)))
referenced_txos = []
if referenced_ids:
referenced_txos.extend(await self.get_many(*referenced_ids))
referenced_ids = set(filter(None, (row['channel_id'] for row in referenced_txos)))
if referenced_ids:
referenced_txos.extend(await self.get_many(*referenced_ids))
return referenced_txos
def expand_query(**kwargs):
if "amount_order" in kwargs:
kwargs["limit"] = 1
kwargs["order_by"] = "effective_amount"
kwargs["offset"] = int(kwargs["amount_order"]) - 1
if 'name' in kwargs:
kwargs['name'] = normalize_name(kwargs.pop('name'))
if kwargs.get('is_controlling') is False:
kwargs.pop('is_controlling')
query = {'must': [], 'must_not': []}
collapse = None
if 'fee_currency' in kwargs and kwargs['fee_currency'] is not None:
kwargs['fee_currency'] = kwargs['fee_currency'].upper()
for key, value in kwargs.items():
key = key.replace('claim.', '')
many = key.endswith('__in') or isinstance(value, list)
if many and len(value) > 2048:
raise TooManyClaimSearchParametersError(key, 2048)
if many:
key = key.replace('__in', '')
value = list(filter(None, value))
if value is None or isinstance(value, list) and len(value) == 0:
continue
key = REPLACEMENTS.get(key, key)
if key in FIELDS:
partial_id = False
if key == 'claim_type':
if isinstance(value, str):
value = CLAIM_TYPES[value]
else:
value = [CLAIM_TYPES[claim_type] for claim_type in value]
elif key == 'stream_type':
value = [STREAM_TYPES[value]] if isinstance(value, str) else list(map(STREAM_TYPES.get, value))
if key == '_id':
if isinstance(value, Iterable):
value = [item[::-1].hex() for item in value]
else:
value = value[::-1].hex()
if not many and key in ('_id', 'claim_id', 'sd_hash') and len(value) < 20:
partial_id = True
if key in ('signature_valid', 'has_source'):
continue # handled later
if key in TEXT_FIELDS:
key += '.keyword'
ops = {'<=': 'lte', '>=': 'gte', '<': 'lt', '>': 'gt'}
if partial_id:
query['must'].append({"prefix": {key: value}})
elif key in RANGE_FIELDS and isinstance(value, str) and value[0] in ops:
operator_length = 2 if value[:2] in ops else 1
operator, value = value[:operator_length], value[operator_length:]
if key == 'fee_amount':
value = str(Decimal(value)*1000)
query['must'].append({"range": {key: {ops[operator]: value}}})
elif key in RANGE_FIELDS and isinstance(value, list) and all(v[0] in ops for v in value):
range_constraints = []
for v in value:
operator_length = 2 if v[:2] in ops else 1
operator, stripped_op_v = v[:operator_length], v[operator_length:]
if key == 'fee_amount':
stripped_op_v = str(Decimal(stripped_op_v)*1000)
range_constraints.append((operator, stripped_op_v))
query['must'].append({"range": {key: {ops[operator]: v for operator, v in range_constraints}}})
elif many:
query['must'].append({"terms": {key: value}})
else:
if key == 'fee_amount':
value = str(Decimal(value)*1000)
query['must'].append({"term": {key: {"value": value}}})
elif key == 'not_channel_ids':
for channel_id in value:
query['must_not'].append({"term": {'channel_id.keyword': channel_id}})
query['must_not'].append({"term": {'_id': channel_id}})
elif key == 'channel_ids':
query['must'].append({"terms": {'channel_id.keyword': value}})
elif key == 'claim_ids':
query['must'].append({"terms": {'claim_id.keyword': value}})
elif key == 'media_types':
query['must'].append({"terms": {'media_type.keyword': value}})
elif key == 'any_languages':
query['must'].append({"terms": {'languages': clean_tags(value)}})
elif key == 'all_languages':
query['must'].extend([{"term": {'languages': tag}} for tag in value])
elif key == 'any_tags':
query['must'].append({"terms": {'tags.keyword': clean_tags(value)}})
elif key == 'all_tags':
query['must'].extend([{"term": {'tags.keyword': tag}} for tag in clean_tags(value)])
elif key == 'not_tags':
query['must_not'].extend([{"term": {'tags.keyword': tag}} for tag in clean_tags(value)])
elif key == 'not_claim_id':
query['must_not'].extend([{"term": {'claim_id.keyword': cid}} for cid in value])
elif key == 'limit_claims_per_channel':
collapse = ('channel_id.keyword', value)
if kwargs.get('has_channel_signature'):
query['must'].append({"exists": {"field": "signature"}})
if 'signature_valid' in kwargs:
query['must'].append({"term": {"is_signature_valid": bool(kwargs["signature_valid"])}})
elif 'signature_valid' in kwargs:
query.setdefault('should', [])
query["minimum_should_match"] = 1
query['should'].append({"bool": {"must_not": {"exists": {"field": "signature"}}}})
query['should'].append({"term": {"is_signature_valid": bool(kwargs["signature_valid"])}})
if 'has_source' in kwargs:
query.setdefault('should', [])
query["minimum_should_match"] = 1
is_stream_or_repost = {"terms": {"claim_type": [CLAIM_TYPES['stream'], CLAIM_TYPES['repost']]}}
query['should'].append(
{"bool": {"must": [{"match": {"has_source": kwargs['has_source']}}, is_stream_or_repost]}})
query['should'].append({"bool": {"must_not": [is_stream_or_repost]}})
query['should'].append({"bool": {"must": [{"term": {"reposted_claim_type": CLAIM_TYPES['channel']}}]}})
if kwargs.get('text'):
query['must'].append(
{"simple_query_string":
{"query": kwargs["text"], "fields": [
"claim_name^4", "channel_name^8", "title^1", "description^.5", "author^1", "tags^.5"
]}})
query = {
"_source": {"excludes": ["description", "title"]},
'query': {'bool': query},
"sort": [],
}
if "limit" in kwargs:
query["size"] = kwargs["limit"]
if 'offset' in kwargs:
query["from"] = kwargs["offset"]
if 'order_by' in kwargs:
if isinstance(kwargs["order_by"], str):
kwargs["order_by"] = [kwargs["order_by"]]
for value in kwargs['order_by']:
if 'trending_group' in value:
# fixme: trending_mixed is 0 for all records on variable decay, making sort slow.
continue
is_asc = value.startswith('^')
value = value[1:] if is_asc else value
value = REPLACEMENTS.get(value, value)
if value in TEXT_FIELDS:
value += '.keyword'
query['sort'].append({value: "asc" if is_asc else "desc"})
if collapse:
query["collapse"] = {
"field": collapse[0],
"inner_hits": {
"name": collapse[0],
"size": collapse[1],
"sort": query["sort"]
}
}
return query
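# Illustrative sketch, not part of the original file: a call such as
# expand_query(name='@lbry', claim_type='channel', limit=5, order_by='^height')
# produces an Elasticsearch body along the lines of
#   {'_source': {'excludes': ['description', 'title']},
#    'query': {'bool': {'must': [...], 'must_not': []}},
#    'sort': [{'height': 'asc'}], 'size': 5}
# with the name normalized and claim_type mapped through CLAIM_TYPES.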
def expand_result(results):
inner_hits = []
expanded = []
for result in results:
if result.get("inner_hits"):
for _, inner_hit in result["inner_hits"].items():
inner_hits.extend(inner_hit["hits"]["hits"])
continue
result = result['_source']
result['claim_hash'] = unhexlify(result['claim_id'])[::-1]
if result['reposted_claim_id']:
result['reposted_claim_hash'] = unhexlify(result['reposted_claim_id'])[::-1]
else:
result['reposted_claim_hash'] = None
result['channel_hash'] = unhexlify(result['channel_id'])[::-1] if result['channel_id'] else None
result['txo_hash'] = unhexlify(result['tx_id'])[::-1] + struct.pack('<I', result['tx_nout'])
result['tx_hash'] = unhexlify(result['tx_id'])[::-1]
result['reposted'] = result.pop('repost_count')
result['signature_valid'] = result.pop('is_signature_valid')
# result['normalized'] = result.pop('normalized_name')
# if result['censoring_channel_hash']:
# result['censoring_channel_hash'] = unhexlify(result['censoring_channel_hash'])[::-1]
expanded.append(result)
if inner_hits:
return expand_result(inner_hits)
return expanded
class ResultCacheItem:
__slots__ = '_result', 'lock', 'has_result'
def __init__(self):
self.has_result = asyncio.Event()
self.lock = asyncio.Lock()
self._result = None
@property
def result(self) -> str:
return self._result
@result.setter
def result(self, result: str):
self._result = result
if result is not None:
self.has_result.set()
@classmethod
def from_cache(cls, cache_key, cache):
cache_item = cache.get(cache_key)
if cache_item is None:
cache_item = cache[cache_key] = ResultCacheItem()
return cache_item
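# Illustrative sketch, not part of the original file: ResultCacheItem gives each
# cache key a double-checked lock, so concurrent identical searches hit
# Elasticsearch only once. A hypothetical caller mirrors cached_search above:
#
#   cache_item = ResultCacheItem.from_cache(key, cache)
#   if cache_item.result is not None:     # fast path, lock-free
#       return cache_item.result
#   async with cache_item.lock:           # slow path, one winner computes
#       if cache_item.result is None:
#           cache_item.result = await expensive_search()
#       return cache_item.result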


@@ -1,138 +0,0 @@
import os
import argparse
import asyncio
import logging
from elasticsearch import AsyncElasticsearch
from elasticsearch.helpers import async_streaming_bulk
from lbry.wallet.server.env import Env
from lbry.wallet.server.leveldb import LevelDB
from lbry.wallet.server.db.elasticsearch.search import SearchIndex, IndexVersionMismatch
from lbry.wallet.server.db.elasticsearch.constants import ALL_FIELDS
async def get_recent_claims(env, index_name='claims', db=None):
log = logging.getLogger()
need_open = db is None
db = db or LevelDB(env)
try:
if need_open:
db.open_db()
if db.es_sync_height == db.db_height or db.db_height <= 0:
return
if need_open:
await db.initialize_caches()
log.info(f"catching up ES ({db.es_sync_height}) to leveldb height: {db.db_height}")
cnt = 0
touched_claims = set()
deleted_claims = set()
for height in range(db.es_sync_height, db.db_height + 1):
touched_or_deleted = db.prefix_db.touched_or_deleted.get(height)
touched_claims.update(touched_or_deleted.touched_claims)
deleted_claims.update(touched_or_deleted.deleted_claims)
touched_claims.difference_update(deleted_claims)
for deleted in deleted_claims:
yield {
'_index': index_name,
'_op_type': 'delete',
'_id': deleted.hex()
}
for touched in touched_claims:
claim = db.claim_producer(touched)
if claim:
yield {
'doc': {key: value for key, value in claim.items() if key in ALL_FIELDS},
'_id': claim['claim_id'],
'_index': index_name,
'_op_type': 'update',
'doc_as_upsert': True
}
cnt += 1
else:
logging.warning("could not sync claim %s", touched.hex())
if cnt % 10000 == 0:
logging.info("%i claims sent to ES", cnt)
db.es_sync_height = db.db_height
db.write_db_state()
db.prefix_db.unsafe_commit()
db.assert_db_state()
logging.info("finished sending %i claims to ES, deleted %i", cnt, len(deleted_claims))
finally:
if need_open:
db.close()
async def get_all_claims(env, index_name='claims', db=None):
need_open = db is None
db = db or LevelDB(env)
if need_open:
db.open_db()
await db.initialize_caches()
logging.info("Fetching claims to send ES from leveldb")
try:
cnt = 0
async for claim in db.all_claims_producer():
yield {
'doc': {key: value for key, value in claim.items() if key in ALL_FIELDS},
'_id': claim['claim_id'],
'_index': index_name,
'_op_type': 'update',
'doc_as_upsert': True
}
cnt += 1
if cnt % 10000 == 0:
logging.info("sent %i claims to ES", cnt)
finally:
if need_open:
db.close()
async def make_es_index_and_run_sync(env: Env, clients=32, force=False, db=None, index_name='claims'):
index = SearchIndex(env.es_index_prefix, elastic_host=env.elastic_host, elastic_port=env.elastic_port)
logging.info("ES sync host: %s:%i", env.elastic_host, env.elastic_port)
try:
created = await index.start()
except IndexVersionMismatch as err:
logging.info(
"dropping ES search index (version %s) for upgrade to version %s", err.got_version, err.expected_version
)
await index.delete_index()
await index.stop()
created = await index.start()
finally:
await index.stop()
es = AsyncElasticsearch([{'host': env.elastic_host, 'port': env.elastic_port}])
if force or created:
claim_generator = get_all_claims(env, index_name=index_name, db=db)
else:
claim_generator = get_recent_claims(env, index_name=index_name, db=db)
try:
async for ok, item in async_streaming_bulk(es, claim_generator, request_timeout=600, raise_on_error=False):
if not ok:
logging.warning("indexing failed for an item: %s", item)
await es.indices.refresh(index=index_name)
finally:
await es.close()
def run_elastic_sync():
logging.basicConfig(level=logging.INFO)
logging.getLogger('aiohttp').setLevel(logging.WARNING)
logging.getLogger('elasticsearch').setLevel(logging.WARNING)
logging.info('lbry.server starting')
parser = argparse.ArgumentParser(prog="lbry-hub-elastic-sync")
parser.add_argument("-c", "--clients", type=int, default=32)
parser.add_argument("-f", "--force", default=False, action='store_true')
Env.contribute_to_arg_parser(parser)
args = parser.parse_args()
env = Env.from_arg_parser(args)
if not os.path.exists(os.path.join(args.db_dir, 'lbry-leveldb')):
logging.info("DB path doesnt exist, nothing to sync to ES")
return
asyncio.run(make_es_index_and_run_sync(env, clients=args.clients, force=args.force))
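# Illustrative example, not part of the original file: this entry point is run
# from the command line, e.g.
#   lbry-hub-elastic-sync --db_dir /var/lib/lbry --force
# where --force re-indexes every claim instead of only the recently touched
# ones (the path shown is hypothetical).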

File diff suppressed because it is too large


@@ -1,175 +0,0 @@
import struct
import logging
from string import printable
from collections import defaultdict
from typing import Tuple, Iterable, Callable, Optional
from lbry.wallet.server.db import DB_PREFIXES
_OP_STRUCT = struct.Struct('>BLL')
log = logging.getLogger()
class RevertableOp:
__slots__ = [
'key',
'value',
]
is_put = 0
def __init__(self, key: bytes, value: bytes):
self.key = key
self.value = value
@property
def is_delete(self) -> bool:
return not self.is_put
def invert(self) -> 'RevertableOp':
raise NotImplementedError()
def pack(self) -> bytes:
"""
Serialize to bytes
"""
return struct.pack(
f'>BLL{len(self.key)}s{len(self.value)}s', int(self.is_put), len(self.key), len(self.value), self.key,
self.value
)
@classmethod
def unpack(cls, packed: bytes) -> Tuple['RevertableOp', bytes]:
"""
Deserialize from bytes
:param packed: bytes containing at least one packed revertable op
:return: tuple of the deserialized op (a put or a delete) and the remaining serialized bytes
"""
is_put, key_len, val_len = _OP_STRUCT.unpack(packed[:9])
key = packed[9:9 + key_len]
value = packed[9 + key_len:9 + key_len + val_len]
if is_put == 1:
return RevertablePut(key, value), packed[9 + key_len + val_len:]
return RevertableDelete(key, value), packed[9 + key_len + val_len:]
def __eq__(self, other: 'RevertableOp') -> bool:
return (self.is_put, self.key, self.value) == (other.is_put, other.key, other.value)
def __repr__(self) -> str:
return str(self)
def __str__(self) -> str:
from lbry.wallet.server.db.prefixes import auto_decode_item
k, v = auto_decode_item(self.key, self.value)
key = ''.join(c if c in printable else '.' for c in str(k))
val = ''.join(c if c in printable else '.' for c in str(v))
return f"{'PUT' if self.is_put else 'DELETE'} {DB_PREFIXES(self.key[:1]).name}: {key} | {val}"
class RevertableDelete(RevertableOp):
def invert(self):
return RevertablePut(self.key, self.value)
class RevertablePut(RevertableOp):
is_put = True
def invert(self):
return RevertableDelete(self.key, self.value)
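# Illustrative sketch, not part of the original file: ops invert to their
# opposite and survive a pack()/unpack() round trip:
#
#   op = RevertablePut(b'k', b'v')
#   assert op.invert() == RevertableDelete(b'k', b'v')
#   unpacked, remaining = RevertableOp.unpack(op.pack())
#   assert unpacked == op and remaining == b''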
class OpStackIntegrity(Exception):
pass
class RevertableOpStack:
def __init__(self, get_fn: Callable[[bytes], Optional[bytes]], unsafe_prefixes=None):
"""
This represents a sequence of revertable puts and deletes to a key-value database that checks for integrity
violations when applying the puts and deletes. The integrity checks ensure that keys that do not exist
are not deleted, and that when keys are deleted the current value is correctly known so that the delete
may be undone. When putting values, the integrity checks ensure that existing values are not overwritten
without first being deleted. Updates are performed by applying a delete op for the old value and a put op
for the new value.
:param get_fn: getter function from an object implementing `KeyValueStorage`
:param unsafe_prefixes: optional set of prefixes to ignore integrity errors for, violations are still logged
"""
self._get = get_fn
self._items = defaultdict(list)
self._unsafe_prefixes = unsafe_prefixes or set()
def append_op(self, op: RevertableOp):
"""
Apply a put or delete op, checking that it introduces no integrity errors
"""
inverted = op.invert()
if self._items[op.key] and inverted == self._items[op.key][-1]:
self._items[op.key].pop() # if the new op is the inverse of the last op, we can safely null both
return
elif self._items[op.key] and self._items[op.key][-1] == op: # duplicate of last op
return # raise an error?
stored_val = self._get(op.key)
has_stored_val = stored_val is not None
delete_stored_op = None if not has_stored_val else RevertableDelete(op.key, stored_val)
will_delete_existing_stored = False if delete_stored_op is None else (delete_stored_op in self._items[op.key])
try:
if op.is_put and has_stored_val and not will_delete_existing_stored:
raise OpStackIntegrity(
f"db op tries to add on top of existing key without deleting first: {op}"
)
elif op.is_delete and has_stored_val and stored_val != op.value and not will_delete_existing_stored:
# there is a value and we're not deleting it in this op
# check that a delete for the stored value is in the stack
raise OpStackIntegrity(f"db op tries to delete with incorrect existing value {op}")
elif op.is_delete and not has_stored_val:
raise OpStackIntegrity(f"db op tries to delete nonexistent key: {op}")
elif op.is_delete and stored_val != op.value:
raise OpStackIntegrity(f"db op tries to delete with incorrect value: {op}")
except OpStackIntegrity as err:
if op.key[:1] in self._unsafe_prefixes:
log.debug(f"skipping over integrity error: {err}")
else:
raise err
self._items[op.key].append(op)
def extend_ops(self, ops: Iterable[RevertableOp]):
"""
Apply a sequence of put or delete ops, checking that they introduce no integrity errors
"""
for op in ops:
self.append_op(op)
def clear(self):
self._items.clear()
def __len__(self):
return sum(map(len, self._items.values()))
def __iter__(self):
for key, ops in self._items.items():
for op in ops:
yield op
def __reversed__(self):
for key, ops in self._items.items():
for op in reversed(ops):
yield op
def get_undo_ops(self) -> bytes:
"""
Get the serialized bytes to undo all of the changes made by the pending ops
"""
return b''.join(op.invert().pack() for op in reversed(self))
def apply_packed_undo_ops(self, packed: bytes):
"""
Unpack and apply a sequence of undo ops from serialized undo bytes
"""
while packed:
op, packed = RevertableOp.unpack(packed)
self.append_op(op)
def get_last_op_for_key(self, key: bytes) -> Optional[RevertableOp]:
if key in self._items and self._items[key]:
return self._items[key][-1]
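# Illustrative sketch, not part of the original file: a stack backed by a plain
# dict as the storage getter. An op followed by its exact inverse cancels out,
# and get_undo_ops() serializes the inverse of whatever remains:
#
#   db = {}
#   stack = RevertableOpStack(db.get)
#   stack.append_op(RevertablePut(b'\x00k', b'v'))
#   stack.append_op(RevertableDelete(b'\x00k', b'v'))  # cancels the put
#   assert len(stack) == 0
#   stack.append_op(RevertablePut(b'\x00k', b'v'))
#   undo_bytes = stack.get_undo_ops()                  # one packed delete op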


@@ -1,384 +0,0 @@
# Copyright (c) 2016, Neil Booth
#
# All rights reserved.
#
# See the file "LICENCE" for information about the copyright
# and warranty status of this software.
import math
import re
import resource
from os import environ
from collections import namedtuple
from ipaddress import ip_address
from lbry.wallet.server.util import class_logger
from lbry.wallet.server.coin import Coin, LBC, LBCTestNet, LBCRegTest
import lbry.wallet.server.util as lib_util
NetIdentity = namedtuple('NetIdentity', 'host tcp_port ssl_port nick_suffix')
class Env:
# Peer discovery
PD_OFF, PD_SELF, PD_ON = range(3)
class Error(Exception):
pass
def __init__(self, coin=None, db_dir=None, daemon_url=None, host=None, rpc_host=None, elastic_host=None,
elastic_port=None, loop_policy=None, max_query_workers=None, websocket_host=None, websocket_port=None,
chain=None, es_index_prefix=None, es_mode=None, cache_MB=None, reorg_limit=None, tcp_port=None,
udp_port=None, ssl_port=None, ssl_certfile=None, ssl_keyfile=None, rpc_port=None,
prometheus_port=None, max_subscriptions=None, banner_file=None, anon_logs=None, log_sessions=None,
allow_lan_udp=None, cache_all_tx_hashes=None, cache_all_claim_txos=None, country=None,
payment_address=None, donation_address=None, max_send=None, max_receive=None, max_sessions=None,
session_timeout=None, drop_client=None, description=None, daily_fee=None,
database_query_timeout=None, db_max_open_files=512):
self.logger = class_logger(__name__, self.__class__.__name__)
self.db_dir = db_dir if db_dir is not None else self.required('DB_DIRECTORY')
self.daemon_url = daemon_url if daemon_url is not None else self.required('DAEMON_URL')
self.db_max_open_files = db_max_open_files
self.host = host if host is not None else self.default('HOST', 'localhost')
self.rpc_host = rpc_host if rpc_host is not None else self.default('RPC_HOST', 'localhost')
self.elastic_host = elastic_host if elastic_host is not None else self.default('ELASTIC_HOST', 'localhost')
self.elastic_port = elastic_port if elastic_port is not None else self.integer('ELASTIC_PORT', 9200)
self.loop_policy = self.set_event_loop_policy(
loop_policy if loop_policy is not None else self.default('EVENT_LOOP_POLICY', None)
)
self.obsolete(['UTXO_MB', 'HIST_MB', 'NETWORK'])
self.max_query_workers = max_query_workers if max_query_workers is not None else self.integer('MAX_QUERY_WORKERS', 4)
self.websocket_host = websocket_host if websocket_host is not None else self.default('WEBSOCKET_HOST', self.host)
self.websocket_port = websocket_port if websocket_port is not None else self.integer('WEBSOCKET_PORT', None)
if coin is not None:
assert issubclass(coin, Coin)
self.coin = coin
else:
chain = chain if chain is not None else self.default('NET', 'mainnet').strip().lower()
if chain == 'mainnet':
self.coin = LBC
elif chain == 'testnet':
self.coin = LBCTestNet
else:
self.coin = LBCRegTest
self.es_index_prefix = es_index_prefix if es_index_prefix is not None else self.default('ES_INDEX_PREFIX', '')
self.es_mode = es_mode if es_mode is not None else self.default('ES_MODE', 'writer')
self.cache_MB = cache_MB if cache_MB is not None else self.integer('CACHE_MB', 1024)
self.reorg_limit = reorg_limit if reorg_limit is not None else self.integer('REORG_LIMIT', self.coin.REORG_LIMIT)
# Server stuff
self.tcp_port = tcp_port if tcp_port is not None else self.integer('TCP_PORT', None)
self.udp_port = udp_port if udp_port is not None else self.integer('UDP_PORT', self.tcp_port)
self.ssl_port = ssl_port if ssl_port is not None else self.integer('SSL_PORT', None)
if self.ssl_port:
self.ssl_certfile = ssl_certfile if ssl_certfile is not None else self.required('SSL_CERTFILE')
self.ssl_keyfile = ssl_keyfile if ssl_keyfile is not None else self.required('SSL_KEYFILE')
self.rpc_port = rpc_port if rpc_port is not None else self.integer('RPC_PORT', 8000)
self.prometheus_port = prometheus_port if prometheus_port is not None else self.integer('PROMETHEUS_PORT', 0)
self.max_subscriptions = max_subscriptions if max_subscriptions is not None else self.integer('MAX_SUBSCRIPTIONS', 10000)
self.banner_file = banner_file if banner_file is not None else self.default('BANNER_FILE', None)
# self.tor_banner_file = self.default('TOR_BANNER_FILE', self.banner_file)
self.anon_logs = anon_logs if anon_logs is not None else self.boolean('ANON_LOGS', False)
self.log_sessions = log_sessions if log_sessions is not None else self.integer('LOG_SESSIONS', 3600)
self.allow_lan_udp = allow_lan_udp if allow_lan_udp is not None else self.boolean('ALLOW_LAN_UDP', False)
self.cache_all_tx_hashes = cache_all_tx_hashes if cache_all_tx_hashes is not None else self.boolean('CACHE_ALL_TX_HASHES', False)
self.cache_all_claim_txos = cache_all_claim_txos if cache_all_claim_txos is not None else self.boolean('CACHE_ALL_CLAIM_TXOS', False)
self.country = country if country is not None else self.default('COUNTRY', 'US')
# Peer discovery
self.peer_discovery = self.peer_discovery_enum()
self.peer_announce = self.boolean('PEER_ANNOUNCE', True)
self.peer_hubs = self.extract_peer_hubs()
# self.tor_proxy_host = self.default('TOR_PROXY_HOST', 'localhost')
# self.tor_proxy_port = self.integer('TOR_PROXY_PORT', None)
# The electrum client takes the empty string as unspecified
self.payment_address = payment_address if payment_address is not None else self.default('PAYMENT_ADDRESS', '')
self.donation_address = donation_address if donation_address is not None else self.default('DONATION_ADDRESS', '')
# Server limits to help prevent DoS
self.max_send = max_send if max_send is not None else self.integer('MAX_SEND', 1000000)
self.max_receive = max_receive if max_receive is not None else self.integer('MAX_RECEIVE', 1000000)
# self.max_subs = self.integer('MAX_SUBS', 250000)
self.max_sessions = max_sessions if max_sessions is not None else self.sane_max_sessions()
# self.max_session_subs = self.integer('MAX_SESSION_SUBS', 50000)
self.session_timeout = session_timeout if session_timeout is not None else self.integer('SESSION_TIMEOUT', 600)
self.drop_client = drop_client if drop_client is not None else self.custom("DROP_CLIENT", None, re.compile)
self.description = description if description is not None else self.default('DESCRIPTION', '')
self.daily_fee = daily_fee if daily_fee is not None else self.string_amount('DAILY_FEE', '0')
# Identities
clearnet_identity = self.clearnet_identity()
tor_identity = self.tor_identity(clearnet_identity)
self.identities = [identity
for identity in (clearnet_identity, tor_identity)
if identity is not None]
self.database_query_timeout = database_query_timeout if database_query_timeout is not None else \
(float(self.integer('QUERY_TIMEOUT_MS', 10000)) / 1000.0)
@classmethod
def default(cls, envvar, default):
return environ.get(envvar, default)
@classmethod
def boolean(cls, envvar, default):
default = 'Yes' if default else ''
return bool(cls.default(envvar, default).strip())
@classmethod
def required(cls, envvar):
value = environ.get(envvar)
if value is None:
raise cls.Error(f'required envvar {envvar} not set')
return value
@classmethod
def string_amount(cls, envvar, default):
value = environ.get(envvar, default)
amount_pattern = re.compile(r"[0-9]{0,10}(\.[0-9]{1,8})?")
if len(value) > 0 and not amount_pattern.fullmatch(value):
raise cls.Error(f'{value} is not a valid amount for {envvar}')
return value
@classmethod
def integer(cls, envvar, default):
value = environ.get(envvar)
if value is None:
return default
try:
return int(value)
except Exception:
raise cls.Error(f'cannot convert envvar {envvar} value {value} to an integer')
@classmethod
def custom(cls, envvar, default, parse):
value = environ.get(envvar)
if value is None:
return default
try:
return parse(value)
except Exception as e:
raise cls.Error(f'cannot parse envvar {envvar} value {value}') from e
@classmethod
def obsolete(cls, envvars):
bad = [envvar for envvar in envvars if environ.get(envvar)]
if bad:
raise cls.Error(f'remove obsolete environment variables {bad}')
@classmethod
def set_event_loop_policy(cls, policy_name: str = None):
if not policy_name or policy_name == 'default':
import asyncio
return asyncio.get_event_loop_policy()
elif policy_name == 'uvloop':
import uvloop
import asyncio
loop_policy = uvloop.EventLoopPolicy()
asyncio.set_event_loop_policy(loop_policy)
return loop_policy
raise cls.Error(f'unknown event loop policy "{policy_name}"')
def cs_host(self, *, for_rpc):
"""Returns the 'host' argument to pass to asyncio's create_server
call. The result can be a single host name string, a list of
host name strings, or an empty string to bind to all interfaces.
If rpc is True the host to use for the RPC server is returned.
Otherwise the host to use for SSL/TCP servers is returned.
"""
host = self.rpc_host if for_rpc else self.host
result = [part.strip() for part in host.split(',')]
if len(result) == 1:
result = result[0]
# An empty result indicates all interfaces, which we do not
# permit for an RPC server.
if for_rpc and not result:
result = 'localhost'
if result == 'localhost':
# 'localhost' resolves to ::1 (ipv6) on many systems, which fails on a
# default docker setup; using 127.0.0.1 instead forces ipv4
result = '127.0.0.1'
return result
def sane_max_sessions(self):
"""Return the maximum number of sessions to permit. Normally this
is MAX_SESSIONS. However, to prevent open file exhaustion, adjust
downwards if running with a small open file rlimit."""
env_value = self.integer('MAX_SESSIONS', 1000)
nofile_limit = resource.getrlimit(resource.RLIMIT_NOFILE)[0]
# We give the DB 250 files; allow ElectrumX 100 for itself
value = max(0, min(env_value, nofile_limit - 350))
if value < env_value:
self.logger.warning(f'lowered maximum sessions from {env_value:,d} to {value:,d} '
f'because your open file limit is {nofile_limit:,d}')
return value
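# Illustrative example, not part of the original file: with the default
# MAX_SESSIONS of 1000 and a soft RLIMIT_NOFILE of 1024, the cap becomes
# max(0, min(1000, 1024 - 350)) = 674 and the warning above is logged.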
def clearnet_identity(self):
host = self.default('REPORT_HOST', None)
if host is None:
return None
try:
ip = ip_address(host)
except ValueError:
bad = (not lib_util.is_valid_hostname(host)
or host.lower() == 'localhost')
else:
bad = (ip.is_multicast or ip.is_unspecified
or (ip.is_private and self.peer_announce))
if bad:
raise self.Error(f'"{host}" is not a valid REPORT_HOST')
tcp_port = self.integer('REPORT_TCP_PORT', self.tcp_port) or None
ssl_port = self.integer('REPORT_SSL_PORT', self.ssl_port) or None
if tcp_port == ssl_port:
raise self.Error('REPORT_TCP_PORT and REPORT_SSL_PORT '
f'both resolve to {tcp_port}')
return NetIdentity(
host,
tcp_port,
ssl_port,
''
)
def tor_identity(self, clearnet):
host = self.default('REPORT_HOST_TOR', None)
if host is None:
return None
if not host.endswith('.onion'):
raise self.Error(f'tor host "{host}" must end with ".onion"')
def port(port_kind):
"""Returns the clearnet identity port, if any and not zero,
otherwise the listening port."""
result = 0
if clearnet:
result = getattr(clearnet, port_kind)
return result or getattr(self, port_kind)
tcp_port = self.integer('REPORT_TCP_PORT_TOR',
port('tcp_port')) or None
ssl_port = self.integer('REPORT_SSL_PORT_TOR',
port('ssl_port')) or None
if tcp_port == ssl_port:
raise self.Error('REPORT_TCP_PORT_TOR and REPORT_SSL_PORT_TOR '
f'both resolve to {tcp_port}')
return NetIdentity(
host,
tcp_port,
ssl_port,
'_tor',
)
def hosts_dict(self):
return {identity.host: {'tcp_port': identity.tcp_port,
'ssl_port': identity.ssl_port}
for identity in self.identities}
def peer_discovery_enum(self):
pd = self.default('PEER_DISCOVERY', 'on').strip().lower()
if pd in ('off', ''):
return self.PD_OFF
elif pd == 'self':
return self.PD_SELF
else:
return self.PD_ON
def extract_peer_hubs(self):
return [hub.strip() for hub in self.default('PEER_HUBS', '').split(',') if hub.strip()]
@classmethod
def contribute_to_arg_parser(cls, parser):
parser.add_argument('--db_dir', type=str, help='path of the directory containing lbry-leveldb',
default=cls.default('DB_DIRECTORY', None))
parser.add_argument('--daemon_url',
help='URL for rpc from lbrycrd, <rpcuser>:<rpcpassword>@<lbrycrd rpc ip><lbrycrd rpc port>',
default=cls.default('DAEMON_URL', None))
parser.add_argument('--db_max_open_files', type=int, default=512,
help='number of files leveldb can have open at a time')
parser.add_argument('--host', type=str, default=cls.default('HOST', 'localhost'),
help='Interface for hub server to listen on')
parser.add_argument('--tcp_port', type=int, default=cls.integer('TCP_PORT', 50001),
help='TCP port to listen on for hub server')
parser.add_argument('--udp_port', type=int, default=cls.integer('UDP_PORT', 50001),
help='UDP port to listen on for hub server')
parser.add_argument('--rpc_host', default=cls.default('RPC_HOST', 'localhost'), type=str,
help='Listening interface for admin rpc')
parser.add_argument('--rpc_port', default=cls.integer('RPC_PORT', 8000), type=int,
help='Listening port for admin rpc')
parser.add_argument('--websocket_host', default=cls.default('WEBSOCKET_HOST', 'localhost'), type=str,
help='Listening interface for websocket')
parser.add_argument('--websocket_port', default=cls.integer('WEBSOCKET_PORT', None), type=int,
help='Listening port for websocket')
parser.add_argument('--ssl_port', default=cls.integer('SSL_PORT', None), type=int,
help='SSL port to listen on for hub server')
parser.add_argument('--ssl_certfile', default=cls.default('SSL_CERTFILE', None), type=str,
help='Path to SSL cert file')
parser.add_argument('--ssl_keyfile', default=cls.default('SSL_KEYFILE', None), type=str,
help='Path to SSL key file')
parser.add_argument('--reorg_limit', default=cls.integer('REORG_LIMIT', 200), type=int, help='Max reorg depth')
parser.add_argument('--elastic_host', default=cls.default('ELASTIC_HOST', 'localhost'), type=str,
help='elasticsearch host')
parser.add_argument('--elastic_port', default=cls.integer('ELASTIC_PORT', 9200), type=int,
help='elasticsearch port')
parser.add_argument('--es_mode', default=cls.default('ES_MODE', 'writer'), type=str,
choices=['reader', 'writer'])
parser.add_argument('--es_index_prefix', default=cls.default('ES_INDEX_PREFIX', ''), type=str)
parser.add_argument('--loop_policy', default=cls.default('EVENT_LOOP_POLICY', 'default'), type=str,
choices=['default', 'uvloop'])
parser.add_argument('--max_query_workers', type=int, default=cls.integer('MAX_QUERY_WORKERS', 4),
help='number of threads used by the request handler to read the database')
parser.add_argument('--cache_MB', type=int, default=cls.integer('CACHE_MB', 1024),
help='size of the leveldb lru cache, in megabytes')
parser.add_argument('--cache_all_tx_hashes', type=bool,
help='Load all tx hashes into memory. This will make address subscriptions and sync, '
'resolve, transaction fetching, and block sync all faster at the expense of higher '
'memory usage')
parser.add_argument('--cache_all_claim_txos', type=bool,
help='Load all claim txos into memory. This will make address subscriptions and sync, '
'resolve, transaction fetching, and block sync all faster at the expense of higher '
'memory usage')
parser.add_argument('--prometheus_port', type=int, default=cls.integer('PROMETHEUS_PORT', 0),
help='port for hub prometheus metrics to listen on, disabled by default')
parser.add_argument('--max_subscriptions', type=int, default=cls.integer('MAX_SUBSCRIPTIONS', 10000),
help='max subscriptions per connection')
parser.add_argument('--banner_file', type=str, default=cls.default('BANNER_FILE', None),
help='path to file containing banner text')
parser.add_argument('--anon_logs', type=bool, default=cls.boolean('ANON_LOGS', False),
help="don't log ip addresses")
parser.add_argument('--allow_lan_udp', type=bool, default=cls.boolean('ALLOW_LAN_UDP', False),
help='reply to hub UDP ping messages from LAN ip addresses')
parser.add_argument('--country', type=str, default=cls.default('COUNTRY', 'US'), help='')
parser.add_argument('--max_send', type=int, default=cls.default('MAX_SEND', 1000000), help='')
parser.add_argument('--max_receive', type=int, default=cls.default('MAX_RECEIVE', 1000000), help='')
parser.add_argument('--max_sessions', type=int, default=cls.default('MAX_SESSIONS', 1000), help='')
parser.add_argument('--session_timeout', type=int, default=cls.default('SESSION_TIMEOUT', 600), help='')
parser.add_argument('--drop_client', type=str, default=cls.default('DROP_CLIENT', None), help='')
parser.add_argument('--description', type=str, default=cls.default('DESCRIPTION', ''), help='')
parser.add_argument('--daily_fee', type=float, default=cls.default('DAILY_FEE', 0.0), help='')
parser.add_argument('--payment_address', type=str, default=cls.default('PAYMENT_ADDRESS', ''), help='')
parser.add_argument('--donation_address', type=str, default=cls.default('DONATION_ADDRESS', ''), help='')
parser.add_argument('--chain', type=str, default=cls.default('NET', 'mainnet'),
help="Which chain to use, default is mainnet")
parser.add_argument('--query_timeout_ms', type=int, default=cls.integer('QUERY_TIMEOUT_MS', 10000),
help="elasticsearch query timeout")
@classmethod
def from_arg_parser(cls, args):
return cls(
db_dir=args.db_dir, daemon_url=args.daemon_url, db_max_open_files=args.db_max_open_files,
host=args.host, rpc_host=args.rpc_host, elastic_host=args.elastic_host, elastic_port=args.elastic_port,
loop_policy=args.loop_policy, max_query_workers=args.max_query_workers, websocket_host=args.websocket_host,
websocket_port=args.websocket_port, chain=args.chain, es_index_prefix=args.es_index_prefix,
es_mode=args.es_mode, cache_MB=args.cache_MB, reorg_limit=args.reorg_limit, tcp_port=args.tcp_port,
udp_port=args.udp_port, ssl_port=args.ssl_port, ssl_certfile=args.ssl_certfile,
ssl_keyfile=args.ssl_keyfile, rpc_port=args.rpc_port, prometheus_port=args.prometheus_port,
max_subscriptions=args.max_subscriptions, banner_file=args.banner_file, anon_logs=args.anon_logs,
log_sessions=None, allow_lan_udp=args.allow_lan_udp,
cache_all_tx_hashes=args.cache_all_tx_hashes, cache_all_claim_txos=args.cache_all_claim_txos,
country=args.country, payment_address=args.payment_address, donation_address=args.donation_address,
max_send=args.max_send, max_receive=args.max_receive, max_sessions=args.max_sessions,
session_timeout=args.session_timeout, drop_client=args.drop_client, description=args.description,
daily_fee=args.daily_fee, database_query_timeout=(args.query_timeout_ms / 1000)
)


@@ -1,160 +0,0 @@
# Copyright (c) 2016-2017, Neil Booth
#
# All rights reserved.
#
# The MIT License (MIT)
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
"""Cryptograph hash functions and related classes."""
import hashlib
import hmac
from lbry.wallet.server.util import bytes_to_int, int_to_bytes, hex_to_bytes
_sha256 = hashlib.sha256
_sha512 = hashlib.sha512
_new_hash = hashlib.new
_new_hmac = hmac.new
HASHX_LEN = 11
CLAIM_HASH_LEN = 20
def sha256(x):
"""Simple wrapper of hashlib sha256."""
return _sha256(x).digest()
def ripemd160(x):
"""Simple wrapper of hashlib ripemd160."""
h = _new_hash('ripemd160')
h.update(x)
return h.digest()
def double_sha256(x):
"""SHA-256 of SHA-256, as used extensively in bitcoin."""
return sha256(sha256(x))
def hmac_sha512(key, msg):
"""Use SHA-512 to provide an HMAC."""
return _new_hmac(key, msg, _sha512).digest()
def hash160(x):
"""RIPEMD-160 of SHA-256.
Used to make bitcoin addresses from pubkeys."""
return ripemd160(sha256(x))
def hash_to_hex_str(x: bytes) -> str:
"""Convert a big-endian binary hash to displayed hex string.
Display form of a binary hash is reversed and converted to hex.
"""
return x[::-1].hex()
def hex_str_to_hash(x: str) -> bytes:
"""Convert a displayed hex string to a binary hash."""
return hex_to_bytes(x)[::-1]
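# Illustrative example, not part of the original file: display form is
# byte-reversed hex, so hash_to_hex_str(b'\x01\x02') == '0201' and
# hex_str_to_hash('0201') == b'\x01\x02'.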
class Base58Error(Exception):
"""Exception used for Base58 errors."""
class Base58:
"""Class providing base 58 functionality."""
chars = '123456789ABCDEFGHJKLMNPQRSTUVWXYZabcdefghijkmnopqrstuvwxyz'
assert len(chars) == 58
cmap = {c: n for n, c in enumerate(chars)}
@staticmethod
def char_value(c):
val = Base58.cmap.get(c)
if val is None:
raise Base58Error(f'invalid base 58 character "{c}"')
return val
@staticmethod
def decode(txt):
"""Decodes txt into a big-endian bytearray."""
if not isinstance(txt, str):
raise TypeError('a string is required')
if not txt:
raise Base58Error('string cannot be empty')
value = 0
for c in txt:
value = value * 58 + Base58.char_value(c)
result = int_to_bytes(value)
# Prepend leading zero bytes if necessary
count = 0
for c in txt:
if c != '1':
break
count += 1
if count:
result = bytes(count) + result
return result
@staticmethod
def encode(be_bytes):
"""Converts a big-endian bytearray into a base58 string."""
value = bytes_to_int(be_bytes)
txt = ''
while value:
value, mod = divmod(value, 58)
txt += Base58.chars[mod]
for byte in be_bytes:
if byte != 0:
break
txt += '1'
return txt[::-1]
@staticmethod
def decode_check(txt, *, hash_fn=double_sha256):
"""Decodes a Base58Check-encoded string to a payload. The version
prefixes it."""
be_bytes = Base58.decode(txt)
result, check = be_bytes[:-4], be_bytes[-4:]
if check != hash_fn(result)[:4]:
raise Base58Error(f'invalid base 58 checksum for {txt}')
return result
@staticmethod
def encode_check(payload, *, hash_fn=double_sha256):
"""Encodes a payload bytearray (which includes the version byte(s))
into a Base58Check string."""
be_bytes = payload + hash_fn(payload)[:4]
return Base58.encode(be_bytes)
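# Illustrative sketch, not part of the original file: encode_check appends a
# 4-byte double-SHA256 checksum and decode_check strips and verifies it:
#
#   payload = b'\x00' + bytes(20)   # version byte plus a zeroed 20-byte hash
#   encoded = Base58.encode_check(payload)
#   assert Base58.decode_check(encoded) == payload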

File diff suppressed because it is too large


@@ -1,361 +0,0 @@
# Copyright (c) 2016-2018, Neil Booth
#
# All rights reserved.
#
# See the file "LICENCE" for information about the copyright
# and warranty status of this software.
"""Mempool handling."""
import asyncio
import itertools
import time
import attr
import typing
from typing import Set, Optional, Callable, Awaitable
from collections import defaultdict
from prometheus_client import Histogram
from lbry.wallet.server.hash import hash_to_hex_str, hex_str_to_hash
from lbry.wallet.server.util import class_logger, chunks
from lbry.wallet.server.leveldb import UTXO
if typing.TYPE_CHECKING:
from lbry.wallet.server.session import LBRYSessionManager
@attr.s(slots=True)
class MemPoolTx:
prevouts = attr.ib()
# A pair is a (hashX, value) tuple
in_pairs = attr.ib()
out_pairs = attr.ib()
fee = attr.ib()
size = attr.ib()
raw_tx = attr.ib()
@attr.s(slots=True)
class MemPoolTxSummary:
hash = attr.ib()
fee = attr.ib()
has_unconfirmed_inputs = attr.ib()
NAMESPACE = "wallet_server"
HISTOGRAM_BUCKETS = (
.005, .01, .025, .05, .075, .1, .25, .5, .75, 1.0, 2.5, 5.0, 7.5, 10.0, 15.0, 20.0, 30.0, 60.0, float('inf')
)
mempool_process_time_metric = Histogram(
"processed_mempool", "Time to process mempool and notify touched addresses",
namespace=NAMESPACE, buckets=HISTOGRAM_BUCKETS
)
class MemPool:
def __init__(self, coin, daemon, db, state_lock: asyncio.Lock, refresh_secs=1.0, log_status_secs=120.0):
self.coin = coin
self._daemon = daemon
self._db = db
self._touched_mp = {}
self._touched_bp = {}
self._highest_block = -1
self.logger = class_logger(__name__, self.__class__.__name__)
self.txs = {}
self.hashXs = defaultdict(set) # None can be a key
self.cached_compact_histogram = []
self.refresh_secs = refresh_secs
self.log_status_secs = log_status_secs
# Prevents mempool refreshes during fee histogram calculation
self.lock = state_lock
self.wakeup = asyncio.Event()
self.mempool_process_time_metric = mempool_process_time_metric
self.notified_mempool_txs = set()
self.notify_sessions: Optional[Callable[[int, Set[bytes], Set[bytes]], Awaitable[None]]] = None
async def _logging(self, synchronized_event):
"""Print regular logs of mempool stats."""
self.logger.info('beginning processing of daemon mempool. '
'This can take some time...')
start = time.perf_counter()
await synchronized_event.wait()
elapsed = time.perf_counter() - start
self.logger.info(f'synced in {elapsed:.2f}s')
while True:
self.logger.info(f'{len(self.txs):,d} txs '
f'touching {len(self.hashXs):,d} addresses')
await asyncio.sleep(self.log_status_secs)
await synchronized_event.wait()
def _accept_transactions(self, tx_map, utxo_map, touched):
"""Accept transactions in tx_map to the mempool if all their inputs
can be found in the existing mempool or a utxo_map from the
DB.
Returns an (unprocessed tx_map, unspent utxo_map) pair.
"""
hashXs = self.hashXs
txs = self.txs
deferred = {}
unspent = set(utxo_map)
# Try to find all prevouts so we can accept the TX
for hash, tx in tx_map.items():
in_pairs = []
try:
for prevout in tx.prevouts:
utxo = utxo_map.get(prevout)
if not utxo:
prev_hash, prev_index = prevout
# Raises KeyError if prev_hash is not in txs
utxo = txs[prev_hash].out_pairs[prev_index]
in_pairs.append(utxo)
except KeyError:
deferred[hash] = tx
continue
# Spend the prevouts
unspent.difference_update(tx.prevouts)
# Save the in_pairs, compute the fee and accept the TX
tx.in_pairs = tuple(in_pairs)
# Avoid negative fees if dealing with generation-like transactions
# because some in_pairs would be missing
tx.fee = max(0, (sum(v for _, v in tx.in_pairs) -
sum(v for _, v in tx.out_pairs)))
txs[hash] = tx
for hashX, value in itertools.chain(tx.in_pairs, tx.out_pairs):
touched.add(hashX)
hashXs[hashX].add(hash)
return deferred, {prevout: utxo_map[prevout] for prevout in unspent}
async def _mempool_loop(self, synchronized_event):
try:
return await self._refresh_hashes(synchronized_event)
except asyncio.CancelledError:
raise
except Exception as e:
self.logger.exception("MEMPOOL DIED")
raise e
async def _refresh_hashes(self, synchronized_event):
"""Refresh our view of the daemon's mempool."""
while True:
start = time.perf_counter()
height = self._daemon.cached_height()
hex_hashes = await self._daemon.mempool_hashes()
if height != await self._daemon.height():
continue
hashes = {hex_str_to_hash(hh) for hh in hex_hashes}
async with self.lock:
new_hashes = hashes.difference(self.notified_mempool_txs)
touched = await self._process_mempool(hashes)
self.notified_mempool_txs.update(new_hashes)
new_touched = {
touched_hashx for touched_hashx, txs in self.hashXs.items() if txs.intersection(new_hashes)
}
synchronized_event.set()
synchronized_event.clear()
await self.on_mempool(touched, new_touched, height)
duration = time.perf_counter() - start
self.mempool_process_time_metric.observe(duration)
try:
# we wait up to `refresh_secs` but go early if a broadcast happens (which triggers wakeup event)
await asyncio.wait_for(self.wakeup.wait(), timeout=self.refresh_secs)
except asyncio.TimeoutError:
pass
finally:
self.wakeup.clear()
async def _process_mempool(self, all_hashes):
# Re-sync with the new set of hashes
txs = self.txs
hashXs = self.hashXs # hashX: [tx_hash, ...]
touched = set()
# First handle txs that have disappeared
for tx_hash in set(txs).difference(all_hashes):
tx = txs.pop(tx_hash)
tx_hashXs = {hashX for hashX, value in tx.in_pairs}
tx_hashXs.update(hashX for hashX, value in tx.out_pairs)
for hashX in tx_hashXs:
hashXs[hashX].remove(tx_hash)
if not hashXs[hashX]:
del hashXs[hashX]
touched.update(tx_hashXs)
# Process new transactions
new_hashes = list(all_hashes.difference(txs))
if new_hashes:
fetches = []
for hashes in chunks(new_hashes, 200):
fetches.append(self._fetch_and_accept(hashes, all_hashes, touched))
tx_map = {}
utxo_map = {}
for fetch in asyncio.as_completed(fetches):
deferred, unspent = await fetch
tx_map.update(deferred)
utxo_map.update(unspent)
prior_count = 0
# FIXME: this is not particularly efficient
while tx_map and len(tx_map) != prior_count:
prior_count = len(tx_map)
tx_map, utxo_map = self._accept_transactions(tx_map, utxo_map, touched)
if tx_map:
self.logger.info(f'{len(tx_map)} txs dropped')
return touched
async def _fetch_and_accept(self, hashes, all_hashes, touched):
"""Fetch a list of mempool transactions."""
raw_txs = await self._daemon.getrawtransactions((hash_to_hex_str(hash) for hash in hashes))
to_hashX = self.coin.hashX_from_script
deserializer = self.coin.DESERIALIZER
tx_map = {}
for hash, raw_tx in zip(hashes, raw_txs):
# The daemon may have evicted the tx from its
# mempool or it may have gotten in a block
if not raw_tx:
continue
tx, tx_size = deserializer(raw_tx).read_tx_and_vsize()
# Convert the inputs and outputs into (hashX, value) pairs
# Drop generation-like inputs from MemPoolTx.prevouts
txin_pairs = tuple((txin.prev_hash, txin.prev_idx)
for txin in tx.inputs
if not txin.is_generation())
txout_pairs = tuple((to_hashX(txout.pk_script), txout.value)
for txout in tx.outputs)
tx_map[hash] = MemPoolTx(txin_pairs, None, txout_pairs,
0, tx_size, raw_tx)
# Determine all prevouts not in the mempool, and fetch the
# UTXO information from the database. Failed prevout lookups
# return None - concurrent database updates happen - which is
# relied upon by _accept_transactions. Ignore prevouts that are
# generation-like.
prevouts = tuple(prevout for tx in tx_map.values()
for prevout in tx.prevouts
if prevout[0] not in all_hashes)
utxos = await self._db.lookup_utxos(prevouts)
utxo_map = dict(zip(prevouts, utxos))
return self._accept_transactions(tx_map, utxo_map, touched)
#
# External interface
#
async def keep_synchronized(self, synchronized_event):
"""Keep the mempool synchronized with the daemon."""
await asyncio.wait([
self._mempool_loop(synchronized_event),
# self._refresh_histogram(synchronized_event),
self._logging(synchronized_event)
])
async def balance_delta(self, hashX):
"""Return the unconfirmed amount in the mempool for hashX.
Can be positive or negative.
"""
value = 0
if hashX in self.hashXs:
for hash in self.hashXs[hashX]:
tx = self.txs[hash]
value -= sum(v for h168, v in tx.in_pairs if h168 == hashX)
value += sum(v for h168, v in tx.out_pairs if h168 == hashX)
return value
def compact_fee_histogram(self):
"""Return a compact fee histogram of the current mempool."""
return self.cached_compact_histogram
async def potential_spends(self, hashX):
"""Return a set of (prev_hash, prev_idx) pairs from mempool
transactions that touch hashX.
None, some or all of these may be spends of the hashX, but all
actual spends of it (in the DB or mempool) will be included.
"""
result = set()
for tx_hash in self.hashXs.get(hashX, ()):
tx = self.txs[tx_hash]
result.update(tx.prevouts)
return result
def transaction_summaries(self, hashX):
"""Return a list of MemPoolTxSummary objects for the hashX."""
result = []
for tx_hash in self.hashXs.get(hashX, ()):
tx = self.txs[tx_hash]
has_ui = any(hash in self.txs for hash, idx in tx.prevouts)
result.append(MemPoolTxSummary(tx_hash, tx.fee, has_ui))
return result
async def unordered_UTXOs(self, hashX):
"""Return an unordered list of UTXO named tuples from mempool
transactions that pay to hashX.
This does not consider if any other mempool transactions spend
the outputs.
"""
utxos = []
for tx_hash in self.hashXs.get(hashX, ()):
tx = self.txs.get(tx_hash)
for pos, (hX, value) in enumerate(tx.out_pairs):
if hX == hashX:
utxos.append(UTXO(-1, pos, tx_hash, 0, value))
return utxos
def get_mempool_height(self, tx_hash):
# Height Progression
# -2: not broadcast
# -1: in mempool but has unconfirmed inputs
# 0: in mempool and all inputs confirmed
# +num: confirmed in a specific block (height)
if tx_hash not in self.txs:
return -2
tx = self.txs[tx_hash]
unspent_inputs = sum(1 if hash in self.txs else 0 for hash, idx in tx.prevouts)
if unspent_inputs:
return -1
return 0
async def _maybe_notify(self, new_touched):
tmp, tbp = self._touched_mp, self._touched_bp
common = set(tmp).intersection(tbp)
if common:
height = max(common)
elif tmp and max(tmp) == self._highest_block:
height = self._highest_block
else:
# Either we are processing a block and waiting for it to
# come in, or we have not yet had a mempool update for the
# new block height
return
touched = tmp.pop(height)
for old in [h for h in tmp if h <= height]:
del tmp[old]
for old in [h for h in tbp if h <= height]:
touched.update(tbp.pop(old))
# print("notify", height, len(touched), len(new_touched))
await self.notify_sessions(height, touched, new_touched)
async def start(self, height, session_manager: 'LBRYSessionManager'):
self._highest_block = height
self.notify_sessions = session_manager._notify_sessions
await self.notify_sessions(height, set(), set())
async def on_mempool(self, touched, new_touched, height):
self._touched_mp[height] = touched
await self._maybe_notify(new_touched)
async def on_block(self, touched, height):
self._touched_bp[height] = touched
self._highest_block = height
await self._maybe_notify(set())


@@ -1,258 +0,0 @@
# Copyright (c) 2018, Neil Booth
#
# All rights reserved.
#
# The MIT License (MIT)
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
# and warranty status of this software.
"""Merkle trees, branches, proofs and roots."""
from asyncio import Event
from math import ceil, log
from lbry.wallet.server.hash import double_sha256
class Merkle:
"""Perform merkle tree calculations on binary hashes using a given hash
function.
If the hash count is not even, the final hash is repeated when
calculating the next merkle layer up the tree.
"""
def __init__(self, hash_func=double_sha256):
self.hash_func = hash_func
@staticmethod
def tree_depth(hash_count):
return Merkle.branch_length(hash_count) + 1
@staticmethod
def branch_length(hash_count):
"""Return the length of a merkle branch given the number of hashes."""
if not isinstance(hash_count, int):
raise TypeError('hash_count must be an integer')
if hash_count < 1:
raise ValueError('hash_count must be at least 1')
return ceil(log(hash_count, 2))
@staticmethod
def branch_and_root(hashes, index, length=None, hash_func=double_sha256):
"""Return a (merkle branch, merkle_root) pair given hashes, and the
index of one of those hashes.
"""
hashes = list(hashes)
if not isinstance(index, int):
raise TypeError('index must be an integer')
# This also asserts hashes is not empty
if not 0 <= index < len(hashes):
raise ValueError(f"index '{index}/{len(hashes)}' out of range")
natural_length = Merkle.branch_length(len(hashes))
if length is None:
length = natural_length
else:
if not isinstance(length, int):
raise TypeError('length must be an integer')
if length < natural_length:
raise ValueError('length out of range')
branch = []
for _ in range(length):
if len(hashes) & 1:
hashes.append(hashes[-1])
branch.append(hashes[index ^ 1])
index >>= 1
hashes = [hash_func(hashes[n] + hashes[n + 1])
for n in range(0, len(hashes), 2)]
return branch, hashes[0]
@staticmethod
def root(hashes, length=None):
"""Return the merkle root of a non-empty iterable of binary hashes."""
branch, root = Merkle.branch_and_root(hashes, 0, length)
return root
# @staticmethod
# def root_from_proof(hash, branch, index, hash_func=double_sha256):
# """Return the merkle root given a hash, a merkle branch to it, and
# its index in the hashes array.
#
# branch is an iterable sorted deepest to shallowest. If the
# returned root is the expected value then the merkle proof is
# verified.
#
# The caller should have confirmed the length of the branch with
# branch_length(). Unfortunately this is not easily done for
# bitcoin transactions as the number of transactions in a block
# is unknown to an SPV client.
# """
# for elt in branch:
# if index & 1:
# hash = hash_func(elt + hash)
# else:
# hash = hash_func(hash + elt)
# index >>= 1
# if index:
# raise ValueError('index out of range for branch')
# return hash
@staticmethod
def level(hashes, depth_higher):
"""Return a level of the merkle tree of hashes the given depth
higher than the bottom row of the original tree."""
size = 1 << depth_higher
root = Merkle.root
return [root(hashes[n: n + size], depth_higher)
for n in range(0, len(hashes), size)]
@staticmethod
def branch_and_root_from_level(level, leaf_hashes, index,
depth_higher):
"""Return a (merkle branch, merkle_root) pair when a merkle-tree has a
level cached.
        To maximally reduce the amount of data hashed in computing a
        merkle branch, cache a tree of depth N at level N // 2.
level is a list of hashes in the middle of the tree (returned
by level())
leaf_hashes are the leaves needed to calculate a partial branch
up to level.
depth_higher is how much higher level is than the leaves of the tree
index is the index in the full list of hashes of the hash whose
merkle branch we want.
"""
if not isinstance(level, list):
raise TypeError("level must be a list")
if not isinstance(leaf_hashes, list):
raise TypeError("leaf_hashes must be a list")
leaf_index = (index >> depth_higher) << depth_higher
leaf_branch, leaf_root = Merkle.branch_and_root(
leaf_hashes, index - leaf_index, depth_higher)
index >>= depth_higher
level_branch, root = Merkle.branch_and_root(level, index)
# Check last so that we know index is in-range
if leaf_root != level[index]:
raise ValueError('leaf hashes inconsistent with level')
return leaf_branch + level_branch, root
class MerkleCache:
"""A cache to calculate merkle branches efficiently."""
def __init__(self, merkle, source_func):
"""Initialise a cache hashes taken from source_func:
async def source_func(index, count):
...
"""
self.merkle = merkle
self.source_func = source_func
self.length = 0
self.depth_higher = 0
self.initialized = Event()
def _segment_length(self):
return 1 << self.depth_higher
def _leaf_start(self, index):
"""Given a level's depth higher and a hash index, return the leaf
index and leaf hash count needed to calculate a merkle branch.
"""
depth_higher = self.depth_higher
return (index >> depth_higher) << depth_higher
def _level(self, hashes):
return self.merkle.level(hashes, self.depth_higher)
async def _extend_to(self, length):
"""Extend the length of the cache if necessary."""
if length <= self.length:
return
# Start from the beginning of any final partial segment.
# Retain the value of depth_higher; in practice this is fine
start = self._leaf_start(self.length)
hashes = await self.source_func(start, length - start)
self.level[start >> self.depth_higher:] = self._level(hashes)
self.length = length
async def _level_for(self, length):
"""Return a (level_length, final_hash) pair for a truncation
of the hashes to the given length."""
if length == self.length:
return self.level
level = self.level[:length >> self.depth_higher]
leaf_start = self._leaf_start(length)
count = min(self._segment_length(), length - leaf_start)
hashes = await self.source_func(leaf_start, count)
level += self._level(hashes)
return level
async def initialize(self, length):
"""Call to initialize the cache to a source of given length."""
self.length = length
self.depth_higher = self.merkle.tree_depth(length) // 2
self.level = self._level(await self.source_func(0, length))
self.initialized.set()
def truncate(self, length):
"""Truncate the cache so it covers no more than length underlying
hashes."""
if not isinstance(length, int):
raise TypeError('length must be an integer')
if length <= 0:
raise ValueError('length must be positive')
if length >= self.length:
return
length = self._leaf_start(length)
self.length = length
self.level[length >> self.depth_higher:] = []
async def branch_and_root(self, length, index):
"""Return a merkle branch and root. Length is the number of
hashes used to calculate the merkle root, index is the position
of the hash to calculate the branch of.
index must be less than length, which must be at least 1."""
if not isinstance(length, int):
raise TypeError('length must be an integer')
if not isinstance(index, int):
raise TypeError('index must be an integer')
if length <= 0:
raise ValueError('length must be positive')
if index >= length:
raise ValueError('index must be less than length')
await self.initialized.wait()
await self._extend_to(length)
leaf_start = self._leaf_start(index)
count = min(self._segment_length(), length - leaf_start)
leaf_hashes = await self.source_func(leaf_start, count)
if length < self._segment_length():
return self.merkle.branch_and_root(leaf_hashes, index)
level = await self._level_for(length)
return self.merkle.branch_and_root_from_level(
level, leaf_hashes, index, self.depth_higher)
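
The root_from_proof helper commented out in the Merkle class above is the
inverse of branch_and_root; a self-contained sketch (hashlib standing in for
lbry.wallet.server.hash.double_sha256, and branch_and_root condensed to the
natural-length case) showing a branch verifying against its root:

import hashlib

def double_sha256(x: bytes) -> bytes:
    return hashlib.sha256(hashlib.sha256(x).digest()).digest()

def branch_and_root(hashes, index):
    hashes, branch = list(hashes), []
    while len(hashes) > 1:
        if len(hashes) & 1:
            hashes.append(hashes[-1])      # odd count: repeat the final hash
        branch.append(hashes[index ^ 1])   # the sibling joins the branch
        index >>= 1
        hashes = [double_sha256(hashes[n] + hashes[n + 1])
                  for n in range(0, len(hashes), 2)]
    return branch, hashes[0]

def root_from_proof(h, branch, index):
    for sibling in branch:
        h = double_sha256(sibling + h) if index & 1 else double_sha256(h + sibling)
        index >>= 1
    return h

leaves = [double_sha256(bytes([i])) for i in range(5)]
branch, root = branch_and_root(leaves, 3)
assert root_from_proof(leaves[3], branch, 3) == root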


@@ -1,135 +0,0 @@
import time
import math
from typing import Tuple
def calculate_elapsed(start) -> int:
return int((time.perf_counter() - start) * 1000)
def calculate_avg_percentiles(data) -> Tuple[int, int, int, int, int, int, int, int]:
if not data:
return 0, 0, 0, 0, 0, 0, 0, 0
data.sort()
size = len(data)
return (
int(sum(data) / size),
data[0],
data[math.ceil(size * .05) - 1],
data[math.ceil(size * .25) - 1],
data[math.ceil(size * .50) - 1],
data[math.ceil(size * .75) - 1],
data[math.ceil(size * .95) - 1],
data[-1]
)
def remove_select_list(sql) -> str:
return sql[sql.index('FROM'):]
class APICallMetrics:
def __init__(self, name):
self.name = name
# total requests received
self.receive_count = 0
self.cache_response_count = 0
# millisecond timings for query based responses
self.query_response_times = []
self.query_intrp_times = []
self.query_error_times = []
self.query_python_times = []
self.query_wait_times = []
self.query_sql_times = [] # aggregate total of multiple SQL calls made per request
self.individual_sql_times = [] # every SQL query run on server
# actual queries
self.errored_queries = set()
self.interrupted_queries = set()
def to_json(self):
return {
# total requests received
"receive_count": self.receive_count,
# sum of these is total responses made
"cache_response_count": self.cache_response_count,
"query_response_count": len(self.query_response_times),
"intrp_response_count": len(self.query_intrp_times),
"error_response_count": len(self.query_error_times),
# millisecond timings for non-cache responses
"response": calculate_avg_percentiles(self.query_response_times),
"interrupt": calculate_avg_percentiles(self.query_intrp_times),
"error": calculate_avg_percentiles(self.query_error_times),
# response, interrupt and error each also report the python, wait and sql stats:
"python": calculate_avg_percentiles(self.query_python_times),
"wait": calculate_avg_percentiles(self.query_wait_times),
"sql": calculate_avg_percentiles(self.query_sql_times),
# extended timings for individual sql executions
"individual_sql": calculate_avg_percentiles(self.individual_sql_times),
"individual_sql_count": len(self.individual_sql_times),
# actual queries
"errored_queries": list(self.errored_queries),
"interrupted_queries": list(self.interrupted_queries),
}
def start(self):
self.receive_count += 1
def cache_response(self):
self.cache_response_count += 1
def _add_query_timings(self, request_total_time, metrics):
if metrics and 'execute_query' in metrics:
sub_process_total = metrics[self.name][0]['total']
individual_query_times = [f['total'] for f in metrics['execute_query']]
aggregated_query_time = sum(individual_query_times)
self.individual_sql_times.extend(individual_query_times)
self.query_sql_times.append(aggregated_query_time)
self.query_python_times.append(sub_process_total - aggregated_query_time)
self.query_wait_times.append(request_total_time - sub_process_total)
@staticmethod
def _add_queries(query_set, metrics):
if metrics and 'execute_query' in metrics:
for execute_query in metrics['execute_query']:
if 'sql' in execute_query:
query_set.add(remove_select_list(execute_query['sql']))
def query_response(self, start, metrics):
self.query_response_times.append(calculate_elapsed(start))
self._add_query_timings(self.query_response_times[-1], metrics)
def query_interrupt(self, start, metrics):
self.query_intrp_times.append(calculate_elapsed(start))
self._add_queries(self.interrupted_queries, metrics)
self._add_query_timings(self.query_intrp_times[-1], metrics)
def query_error(self, start, metrics):
self.query_error_times.append(calculate_elapsed(start))
self._add_queries(self.errored_queries, metrics)
self._add_query_timings(self.query_error_times[-1], metrics)
class ServerLoadData:
def __init__(self):
self._apis = {}
def for_api(self, name) -> APICallMetrics:
if name not in self._apis:
self._apis[name] = APICallMetrics(name)
return self._apis[name]
def to_json_and_reset(self, status):
try:
return {
'api': {name: api.to_json() for name, api in self._apis.items()},
'status': status
}
finally:
self._apis = {}
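
For a quick illustration of the tuple calculate_avg_percentiles returns,
(average, min, 5th, 25th, 50th, 75th, 95th percentile, max), here is a
self-contained copy with a sanity check (not part of the diff):

import math

def calculate_avg_percentiles(data):
    # identical logic to the deleted helper above
    if not data:
        return 0, 0, 0, 0, 0, 0, 0, 0
    data.sort()
    size = len(data)
    return (
        int(sum(data) / size),
        data[0],
        data[math.ceil(size * .05) - 1],
        data[math.ceil(size * .25) - 1],
        data[math.ceil(size * .50) - 1],
        data[math.ceil(size * .75) - 1],
        data[math.ceil(size * .95) - 1],
        data[-1],
    )

assert calculate_avg_percentiles(list(range(1, 101))) == (50, 1, 5, 25, 50, 75, 95, 100)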


@@ -1,289 +0,0 @@
# Copyright (c) 2016-2017, Neil Booth
#
# All rights reserved.
#
# The MIT License (MIT)
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
# See the file "LICENCE" for information about the copyright
# and warranty status of this software.
"""Script-related classes and functions."""
from collections import namedtuple
from lbry.wallet.server.util import unpack_le_uint16_from, unpack_le_uint32_from, \
pack_le_uint16, pack_le_uint32
class EnumError(Exception):
pass
class Enumeration:
def __init__(self, name, enumList):
self.__doc__ = name
lookup = {}
reverseLookup = {}
i = 0
uniqueNames = set()
uniqueValues = set()
for x in enumList:
if isinstance(x, tuple):
x, i = x
if not isinstance(x, str):
raise EnumError(f"enum name {x} not a string")
if not isinstance(i, int):
raise EnumError(f"enum value {i} not an integer")
if x in uniqueNames:
raise EnumError(f"enum name {x} not unique")
if i in uniqueValues:
raise EnumError(f"enum value {i} not unique")
uniqueNames.add(x)
uniqueValues.add(i)
lookup[x] = i
reverseLookup[i] = x
i = i + 1
self.lookup = lookup
self.reverseLookup = reverseLookup
def __getattr__(self, attr):
result = self.lookup.get(attr)
if result is None:
raise AttributeError(f'enumeration has no member {attr}')
return result
def whatis(self, value):
return self.reverseLookup[value]
class ScriptError(Exception):
    """Exception used for script errors."""


class PubKeyError(Exception):
    """Exception raised for invalid public keys."""
OpCodes = Enumeration("Opcodes", [
("OP_0", 0), ("OP_PUSHDATA1", 76),
"OP_PUSHDATA2", "OP_PUSHDATA4", "OP_1NEGATE",
"OP_RESERVED",
"OP_1", "OP_2", "OP_3", "OP_4", "OP_5", "OP_6", "OP_7", "OP_8",
"OP_9", "OP_10", "OP_11", "OP_12", "OP_13", "OP_14", "OP_15", "OP_16",
"OP_NOP", "OP_VER", "OP_IF", "OP_NOTIF", "OP_VERIF", "OP_VERNOTIF",
"OP_ELSE", "OP_ENDIF", "OP_VERIFY", "OP_RETURN",
"OP_TOALTSTACK", "OP_FROMALTSTACK", "OP_2DROP", "OP_2DUP", "OP_3DUP",
"OP_2OVER", "OP_2ROT", "OP_2SWAP", "OP_IFDUP", "OP_DEPTH", "OP_DROP",
"OP_DUP", "OP_NIP", "OP_OVER", "OP_PICK", "OP_ROLL", "OP_ROT",
"OP_SWAP", "OP_TUCK",
"OP_CAT", "OP_SUBSTR", "OP_LEFT", "OP_RIGHT", "OP_SIZE",
"OP_INVERT", "OP_AND", "OP_OR", "OP_XOR", "OP_EQUAL", "OP_EQUALVERIFY",
"OP_RESERVED1", "OP_RESERVED2",
"OP_1ADD", "OP_1SUB", "OP_2MUL", "OP_2DIV", "OP_NEGATE", "OP_ABS",
"OP_NOT", "OP_0NOTEQUAL", "OP_ADD", "OP_SUB", "OP_MUL", "OP_DIV", "OP_MOD",
"OP_LSHIFT", "OP_RSHIFT", "OP_BOOLAND", "OP_BOOLOR", "OP_NUMEQUAL",
"OP_NUMEQUALVERIFY", "OP_NUMNOTEQUAL", "OP_LESSTHAN", "OP_GREATERTHAN",
"OP_LESSTHANOREQUAL", "OP_GREATERTHANOREQUAL", "OP_MIN", "OP_MAX",
"OP_WITHIN",
"OP_RIPEMD160", "OP_SHA1", "OP_SHA256", "OP_HASH160", "OP_HASH256",
"OP_CODESEPARATOR", "OP_CHECKSIG", "OP_CHECKSIGVERIFY", "OP_CHECKMULTISIG",
"OP_CHECKMULTISIGVERIFY",
"OP_NOP1",
"OP_CHECKLOCKTIMEVERIFY", "OP_CHECKSEQUENCEVERIFY"
])
# Paranoia to make it hard to create bad scripts
assert OpCodes.OP_DUP == 0x76
assert OpCodes.OP_HASH160 == 0xa9
assert OpCodes.OP_EQUAL == 0x87
assert OpCodes.OP_EQUALVERIFY == 0x88
assert OpCodes.OP_CHECKSIG == 0xac
assert OpCodes.OP_CHECKMULTISIG == 0xae
def _match_ops(ops, pattern):
if len(ops) != len(pattern):
return False
for op, pop in zip(ops, pattern):
if pop != op:
# -1 means 'data push', whose op is an (op, data) tuple
if pop == -1 and isinstance(op, tuple):
continue
return False
return True
class ScriptPubKey:
"""A class for handling a tx output script that gives conditions
necessary for spending.
"""
TO_ADDRESS_OPS = [OpCodes.OP_DUP, OpCodes.OP_HASH160, -1,
OpCodes.OP_EQUALVERIFY, OpCodes.OP_CHECKSIG]
TO_P2SH_OPS = [OpCodes.OP_HASH160, -1, OpCodes.OP_EQUAL]
TO_PUBKEY_OPS = [-1, OpCodes.OP_CHECKSIG]
PayToHandlers = namedtuple('PayToHandlers', 'address script_hash pubkey '
'unspendable strange')
@classmethod
def pay_to(cls, handlers, script):
"""Parse a script, invoke the appropriate handler and
return the result.
One of the following handlers is invoked:
handlers.address(hash160)
handlers.script_hash(hash160)
handlers.pubkey(pubkey)
handlers.unspendable()
handlers.strange(script)
"""
try:
ops = Script.get_ops(script)
except ScriptError:
return handlers.unspendable()
match = _match_ops
if match(ops, cls.TO_ADDRESS_OPS):
return handlers.address(ops[2][-1])
if match(ops, cls.TO_P2SH_OPS):
return handlers.script_hash(ops[1][-1])
if match(ops, cls.TO_PUBKEY_OPS):
return handlers.pubkey(ops[0][-1])
if ops and ops[0] == OpCodes.OP_RETURN:
return handlers.unspendable()
return handlers.strange(script)
@classmethod
def P2SH_script(cls, hash160):
return (bytes([OpCodes.OP_HASH160])
+ Script.push_data(hash160)
+ bytes([OpCodes.OP_EQUAL]))
@classmethod
def P2PKH_script(cls, hash160):
return (bytes([OpCodes.OP_DUP, OpCodes.OP_HASH160])
+ Script.push_data(hash160)
+ bytes([OpCodes.OP_EQUALVERIFY, OpCodes.OP_CHECKSIG]))
@classmethod
def validate_pubkey(cls, pubkey, req_compressed=False):
if isinstance(pubkey, (bytes, bytearray)):
if len(pubkey) == 33 and pubkey[0] in (2, 3):
return # Compressed
if len(pubkey) == 65 and pubkey[0] == 4:
if not req_compressed:
return
raise PubKeyError('uncompressed pubkeys are invalid')
raise PubKeyError(f'invalid pubkey {pubkey}')
@classmethod
def pubkey_script(cls, pubkey):
cls.validate_pubkey(pubkey)
return Script.push_data(pubkey) + bytes([OpCodes.OP_CHECKSIG])
@classmethod
def multisig_script(cls, m, pubkeys):
"""Returns the script for a pay-to-multisig transaction."""
n = len(pubkeys)
if not 1 <= m <= n <= 15:
raise ScriptError(f'{m:d} of {n:d} multisig script not possible')
for pubkey in pubkeys:
cls.validate_pubkey(pubkey, req_compressed=True)
# See https://bitcoin.org/en/developer-guide
# 2 of 3 is: OP_2 pubkey1 pubkey2 pubkey3 OP_3 OP_CHECKMULTISIG
return (bytes([OP_1 + m - 1])
+ b''.join(cls.push_data(pubkey) for pubkey in pubkeys)
+ bytes([OP_1 + n - 1, OP_CHECK_MULTISIG]))
class Script:
@classmethod
def get_ops(cls, script):
ops = []
# The unpacks or script[n] below throw on truncated scripts
try:
n = 0
while n < len(script):
op = script[n]
n += 1
if op <= OpCodes.OP_PUSHDATA4:
# Raw bytes follow
if op < OpCodes.OP_PUSHDATA1:
dlen = op
elif op == OpCodes.OP_PUSHDATA1:
dlen = script[n]
n += 1
elif op == OpCodes.OP_PUSHDATA2:
dlen, = unpack_le_uint16_from(script[n: n + 2])
n += 2
else:
dlen, = unpack_le_uint32_from(script[n: n + 4])
n += 4
if n + dlen > len(script):
raise IndexError
op = (op, script[n:n + dlen])
n += dlen
ops.append(op)
except Exception:
# Truncated script; e.g. tx_hash
# ebc9fa1196a59e192352d76c0f6e73167046b9d37b8302b6bb6968dfd279b767
raise ScriptError('truncated script')
return ops
@classmethod
def push_data(cls, data):
"""Returns the opcodes to push the data on the stack."""
assert isinstance(data, (bytes, bytearray))
n = len(data)
if n < OpCodes.OP_PUSHDATA1:
return bytes([n]) + data
if n < 256:
return bytes([OpCodes.OP_PUSHDATA1, n]) + data
if n < 65536:
return bytes([OpCodes.OP_PUSHDATA2]) + pack_le_uint16(n) + data
return bytes([OpCodes.OP_PUSHDATA4]) + pack_le_uint32(n) + data
@classmethod
def opcode_name(cls, opcode):
if OpCodes.OP_0 < opcode < OpCodes.OP_PUSHDATA1:
return f'OP_{opcode:d}'
try:
return OpCodes.whatis(opcode)
except KeyError:
return f'OP_UNKNOWN:{opcode:d}'
    @classmethod
    def dump(cls, script):
        # get_ops returns a single list whose items are either plain opcodes
        # or (opcode, data) tuples for pushes
        for op in cls.get_ops(script):
            opcode, data = op if isinstance(op, tuple) else (op, None)
            name = cls.opcode_name(opcode)
            if data is None:
                print(name)
            else:
                print(f'{name} {data.hex()} ({len(data):d} bytes)')
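
A minimal sketch (not part of the diff) of the push-data encoding and the
P2PKH template that TO_ADDRESS_OPS describes, with the opcode constants
inlined rather than taken from the Enumeration:

OP_DUP, OP_HASH160, OP_EQUALVERIFY, OP_CHECKSIG, OP_PUSHDATA1 = 0x76, 0xa9, 0x88, 0xac, 76

def push_data(data: bytes) -> bytes:
    assert len(data) < OP_PUSHDATA1  # single-byte length pushes only, for brevity
    return bytes([len(data)]) + data

def p2pkh_script(hash160: bytes) -> bytes:
    return (bytes([OP_DUP, OP_HASH160]) + push_data(hash160)
            + bytes([OP_EQUALVERIFY, OP_CHECKSIG]))

script = p2pkh_script(bytes(20))
# OP_DUP OP_HASH160 <20-byte push> OP_EQUALVERIFY OP_CHECKSIG == 25 bytes
assert script[:2] == bytes([OP_DUP, OP_HASH160])
assert script[2] == 20 and len(script) == 25
assert script[-2:] == bytes([OP_EQUALVERIFY, OP_CHECKSIG])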


@@ -1,91 +0,0 @@
import signal
import logging
import asyncio
from concurrent.futures.thread import ThreadPoolExecutor
import typing
import lbry
from lbry.wallet.server.mempool import MemPool
from lbry.wallet.server.block_processor import BlockProcessor
from lbry.wallet.server.leveldb import LevelDB
from lbry.wallet.server.session import LBRYSessionManager
from lbry.prometheus import PrometheusServer
class Server:
def __init__(self, env):
self.env = env
self.log = logging.getLogger(__name__).getChild(self.__class__.__name__)
self.shutdown_event = asyncio.Event()
self.cancellable_tasks = []
self.daemon = daemon = env.coin.DAEMON(env.coin, env.daemon_url)
self.db = db = LevelDB(env)
self.bp = bp = BlockProcessor(env, db, daemon, self.shutdown_event)
self.prometheus_server: typing.Optional[PrometheusServer] = None
self.session_mgr = LBRYSessionManager(
env, db, bp, daemon, self.shutdown_event
)
self._indexer_task = None
async def start(self):
env = self.env
min_str, max_str = env.coin.SESSIONCLS.protocol_min_max_strings()
self.log.info(f'software version: {lbry.__version__}')
self.log.info(f'supported protocol versions: {min_str}-{max_str}')
self.log.info(f'event loop policy: {env.loop_policy}')
self.log.info(f'reorg limit is {env.reorg_limit:,d} blocks')
await self.daemon.height()
def _start_cancellable(run, *args):
_flag = asyncio.Event()
self.cancellable_tasks.append(asyncio.ensure_future(run(*args, _flag)))
return _flag.wait()
await self.start_prometheus()
if self.env.udp_port:
await self.bp.status_server.start(
0, bytes.fromhex(self.bp.coin.GENESIS_HASH)[::-1], self.env.country,
self.env.host, self.env.udp_port, self.env.allow_lan_udp
)
await _start_cancellable(self.bp.fetch_and_process_blocks)
await self.db.populate_header_merkle_cache()
await _start_cancellable(self.bp.mempool.keep_synchronized)
await _start_cancellable(self.session_mgr.serve, self.bp.mempool)
async def stop(self):
for task in reversed(self.cancellable_tasks):
task.cancel()
await asyncio.wait(self.cancellable_tasks)
if self.prometheus_server:
await self.prometheus_server.stop()
self.prometheus_server = None
self.shutdown_event.set()
await self.daemon.close()
def run(self):
loop = asyncio.get_event_loop()
executor = ThreadPoolExecutor(self.env.max_query_workers, thread_name_prefix='hub-worker')
loop.set_default_executor(executor)
def __exit():
raise SystemExit()
try:
loop.add_signal_handler(signal.SIGINT, __exit)
loop.add_signal_handler(signal.SIGTERM, __exit)
loop.run_until_complete(self.start())
loop.run_until_complete(self.shutdown_event.wait())
except (SystemExit, KeyboardInterrupt):
pass
finally:
loop.run_until_complete(self.stop())
executor.shutdown(True)
async def start_prometheus(self):
if not self.prometheus_server and self.env.prometheus_port:
self.prometheus_server = PrometheusServer()
await self.prometheus_server.start("0.0.0.0", self.env.prometheus_port)
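
The _start_cancellable idiom in Server.start is worth isolating: each
long-running coroutine receives an asyncio.Event as its final argument and
sets it once startup is done, so the caller can await readiness before
starting the next stage. A runnable sketch (worker is hypothetical):

import asyncio

async def worker(name: str, started: asyncio.Event):
    # ... perform startup work here ...
    started.set()              # tell the caller that startup finished
    await asyncio.sleep(3600)  # then serve until cancelled

async def main():
    tasks = []

    def start_cancellable(run, *args):
        flag = asyncio.Event()
        tasks.append(asyncio.ensure_future(run(*args, flag)))
        return flag.wait()

    await start_cancellable(worker, 'mempool')   # blocks only until flag is set
    await start_cancellable(worker, 'sessions')  # stages start strictly in order
    for task in reversed(tasks):                 # shutdown mirrors Server.stop
        task.cancel()
    await asyncio.gather(*tasks, return_exceptions=True)

asyncio.run(main())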

File diff suppressed because it is too large.


@@ -1,626 +0,0 @@
# Copyright (c) 2016-2017, Neil Booth
# Copyright (c) 2017, the ElectrumX authors
#
# All rights reserved.
#
# The MIT License (MIT)
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
# See the file "LICENCE" for information about the copyright
# and warranty status of this software.
"""Transaction-related classes and functions."""
import typing
from collections import namedtuple
from lbry.wallet.server.hash import sha256, double_sha256, hash_to_hex_str
from lbry.wallet.server.script import OpCodes
from lbry.wallet.server.util import (
unpack_le_int32_from, unpack_le_int64_from, unpack_le_uint16_from,
unpack_le_uint32_from, unpack_le_uint64_from, pack_le_int32, pack_varint,
pack_le_uint32, pack_le_int64, pack_varbytes,
)
ZERO = bytes(32)
MINUS_1 = 4294967295
class Tx(typing.NamedTuple):
version: int
inputs: typing.List['TxInput']
outputs: typing.List['TxOutput']
locktime: int
raw: bytes
class TxInput(typing.NamedTuple):
prev_hash: bytes
prev_idx: int
script: bytes
sequence: int
"""Class representing a transaction input."""
def __str__(self):
script = self.script.hex()
prev_hash = hash_to_hex_str(self.prev_hash)
return (f"Input({prev_hash}, {self.prev_idx:d}, script={script}, sequence={self.sequence:d})")
def is_generation(self):
"""Test if an input is generation/coinbase like"""
return self.prev_idx == MINUS_1 and self.prev_hash == ZERO
def serialize(self):
return b''.join((
self.prev_hash,
pack_le_uint32(self.prev_idx),
pack_varbytes(self.script),
pack_le_uint32(self.sequence),
))
class TxOutput(typing.NamedTuple):
value: int
pk_script: bytes
def serialize(self):
return b''.join((
pack_le_int64(self.value),
pack_varbytes(self.pk_script),
))
class Deserializer:
"""Deserializes blocks into transactions.
External entry points are read_tx(), read_tx_and_hash(),
read_tx_and_vsize() and read_block().
This code is performance sensitive as it is executed 100s of
millions of times during sync.
"""
TX_HASH_FN = staticmethod(double_sha256)
def __init__(self, binary, start=0):
assert isinstance(binary, bytes)
self.binary = binary
self.binary_length = len(binary)
self.cursor = start
self.flags = 0
def read_tx(self):
"""Return a deserialized transaction."""
start = self.cursor
return Tx(
self._read_le_int32(), # version
self._read_inputs(), # inputs
self._read_outputs(), # outputs
self._read_le_uint32(), # locktime
self.binary[start:self.cursor],
)
def read_tx_and_hash(self):
"""Return a (deserialized TX, tx_hash) pair.
The hash needs to be reversed for human display; for efficiency
we process it in the natural serialized order.
"""
start = self.cursor
return self.read_tx(), self.TX_HASH_FN(self.binary[start:self.cursor])
def read_tx_and_vsize(self):
"""Return a (deserialized TX, vsize) pair."""
return self.read_tx(), self.binary_length
def read_tx_block(self):
"""Returns a list of (deserialized_tx, tx_hash) pairs."""
read = self.read_tx_and_hash
# Some coins have excess data beyond the end of the transactions
return [read() for _ in range(self._read_varint())]
def _read_inputs(self):
read_input = self._read_input
return [read_input() for i in range(self._read_varint())]
def _read_input(self):
return TxInput(
self._read_nbytes(32), # prev_hash
self._read_le_uint32(), # prev_idx
self._read_varbytes(), # script
self._read_le_uint32() # sequence
)
def _read_outputs(self):
read_output = self._read_output
return [read_output() for i in range(self._read_varint())]
def _read_output(self):
return TxOutput(
self._read_le_int64(), # value
self._read_varbytes(), # pk_script
)
def _read_byte(self):
cursor = self.cursor
self.cursor += 1
return self.binary[cursor]
def _read_nbytes(self, n):
cursor = self.cursor
self.cursor = end = cursor + n
assert self.binary_length >= end
return self.binary[cursor:end]
def _read_varbytes(self):
return self._read_nbytes(self._read_varint())
def _read_varint(self):
n = self.binary[self.cursor]
self.cursor += 1
if n < 253:
return n
if n == 253:
return self._read_le_uint16()
if n == 254:
return self._read_le_uint32()
return self._read_le_uint64()
def _read_le_int32(self):
result, = unpack_le_int32_from(self.binary, self.cursor)
self.cursor += 4
return result
def _read_le_int64(self):
result, = unpack_le_int64_from(self.binary, self.cursor)
self.cursor += 8
return result
def _read_le_uint16(self):
result, = unpack_le_uint16_from(self.binary, self.cursor)
self.cursor += 2
return result
def _read_le_uint32(self):
result, = unpack_le_uint32_from(self.binary, self.cursor)
self.cursor += 4
return result
def _read_le_uint64(self):
result, = unpack_le_uint64_from(self.binary, self.cursor)
self.cursor += 8
return result
class TxSegWit(namedtuple("Tx", "version marker flag inputs outputs "
"witness locktime raw")):
"""Class representing a SegWit transaction."""
class DeserializerSegWit(Deserializer):
# https://bitcoincore.org/en/segwit_wallet_dev/#transaction-serialization
def _read_witness(self, fields):
read_witness_field = self._read_witness_field
return [read_witness_field() for i in range(fields)]
def _read_witness_field(self):
read_varbytes = self._read_varbytes
return [read_varbytes() for i in range(self._read_varint())]
def _read_tx_parts(self):
"""Return a (deserialized TX, tx_hash, vsize) tuple."""
start = self.cursor
marker = self.binary[self.cursor + 4]
if marker:
tx = super().read_tx()
tx_hash = self.TX_HASH_FN(self.binary[start:self.cursor])
return tx, tx_hash, self.binary_length
# Ugh, this is nasty.
version = self._read_le_int32()
orig_ser = self.binary[start:self.cursor]
marker = self._read_byte()
flag = self._read_byte()
start = self.cursor
inputs = self._read_inputs()
outputs = self._read_outputs()
orig_ser += self.binary[start:self.cursor]
base_size = self.cursor - start
witness = self._read_witness(len(inputs))
start = self.cursor
locktime = self._read_le_uint32()
orig_ser += self.binary[start:self.cursor]
vsize = (3 * base_size + self.binary_length) // 4
return TxSegWit(version, marker, flag, inputs, outputs, witness,
locktime, orig_ser), self.TX_HASH_FN(orig_ser), vsize
def read_tx(self):
return self._read_tx_parts()[0]
def read_tx_and_hash(self):
tx, tx_hash, vsize = self._read_tx_parts()
return tx, tx_hash
def read_tx_and_vsize(self):
tx, tx_hash, vsize = self._read_tx_parts()
return tx, vsize
class DeserializerAuxPow(Deserializer):
VERSION_AUXPOW = (1 << 8)
def read_header(self, height, static_header_size):
"""Return the AuxPow block header bytes"""
start = self.cursor
version = self._read_le_uint32()
if version & self.VERSION_AUXPOW:
# We are going to calculate the block size then read it as bytes
self.cursor = start
self.cursor += static_header_size # Block normal header
self.read_tx() # AuxPow transaction
self.cursor += 32 # Parent block hash
merkle_size = self._read_varint()
self.cursor += 32 * merkle_size # Merkle branch
self.cursor += 4 # Index
merkle_size = self._read_varint()
self.cursor += 32 * merkle_size # Chain merkle branch
self.cursor += 4 # Chain index
self.cursor += 80 # Parent block header
header_end = self.cursor
else:
header_end = static_header_size
self.cursor = start
return self._read_nbytes(header_end)
class DeserializerAuxPowSegWit(DeserializerSegWit, DeserializerAuxPow):
pass
class DeserializerEquihash(Deserializer):
def read_header(self, height, static_header_size):
"""Return the block header bytes"""
start = self.cursor
# We are going to calculate the block size then read it as bytes
self.cursor += static_header_size
solution_size = self._read_varint()
self.cursor += solution_size
header_end = self.cursor
self.cursor = start
return self._read_nbytes(header_end)
class DeserializerEquihashSegWit(DeserializerSegWit, DeserializerEquihash):
pass
class TxJoinSplit(namedtuple("Tx", "version inputs outputs locktime")):
"""Class representing a JoinSplit transaction."""
class DeserializerZcash(DeserializerEquihash):
def read_tx(self):
header = self._read_le_uint32()
overwintered = ((header >> 31) == 1)
if overwintered:
version = header & 0x7fffffff
self.cursor += 4 # versionGroupId
else:
version = header
is_overwinter_v3 = version == 3
is_sapling_v4 = version == 4
base_tx = TxJoinSplit(
version,
self._read_inputs(), # inputs
self._read_outputs(), # outputs
self._read_le_uint32() # locktime
)
if is_overwinter_v3 or is_sapling_v4:
self.cursor += 4 # expiryHeight
has_shielded = False
if is_sapling_v4:
self.cursor += 8 # valueBalance
shielded_spend_size = self._read_varint()
self.cursor += shielded_spend_size * 384 # vShieldedSpend
shielded_output_size = self._read_varint()
self.cursor += shielded_output_size * 948 # vShieldedOutput
has_shielded = shielded_spend_size > 0 or shielded_output_size > 0
if base_tx.version >= 2:
joinsplit_size = self._read_varint()
if joinsplit_size > 0:
joinsplit_desc_len = 1506 + (192 if is_sapling_v4 else 296)
# JSDescription
self.cursor += joinsplit_size * joinsplit_desc_len
self.cursor += 32 # joinSplitPubKey
self.cursor += 64 # joinSplitSig
if is_sapling_v4 and has_shielded:
self.cursor += 64 # bindingSig
return base_tx
class TxTime(namedtuple("Tx", "version time inputs outputs locktime")):
"""Class representing transaction that has a time field."""
class DeserializerTxTime(Deserializer):
def read_tx(self):
return TxTime(
self._read_le_int32(), # version
self._read_le_uint32(), # time
self._read_inputs(), # inputs
self._read_outputs(), # outputs
self._read_le_uint32(), # locktime
)
class DeserializerReddcoin(Deserializer):
def read_tx(self):
version = self._read_le_int32()
inputs = self._read_inputs()
outputs = self._read_outputs()
locktime = self._read_le_uint32()
if version > 1:
time = self._read_le_uint32()
else:
time = 0
return TxTime(version, time, inputs, outputs, locktime)
class DeserializerTxTimeAuxPow(DeserializerTxTime):
VERSION_AUXPOW = (1 << 8)
def is_merged_block(self):
start = self.cursor
self.cursor = 0
version = self._read_le_uint32()
self.cursor = start
if version & self.VERSION_AUXPOW:
return True
return False
def read_header(self, height, static_header_size):
"""Return the AuxPow block header bytes"""
start = self.cursor
version = self._read_le_uint32()
if version & self.VERSION_AUXPOW:
# We are going to calculate the block size then read it as bytes
self.cursor = start
self.cursor += static_header_size # Block normal header
self.read_tx() # AuxPow transaction
self.cursor += 32 # Parent block hash
merkle_size = self._read_varint()
self.cursor += 32 * merkle_size # Merkle branch
self.cursor += 4 # Index
merkle_size = self._read_varint()
self.cursor += 32 * merkle_size # Chain merkle branch
self.cursor += 4 # Chain index
self.cursor += 80 # Parent block header
header_end = self.cursor
else:
header_end = static_header_size
self.cursor = start
return self._read_nbytes(header_end)
class DeserializerBitcoinAtom(DeserializerSegWit):
FORK_BLOCK_HEIGHT = 505888
def read_header(self, height, static_header_size):
"""Return the block header bytes"""
header_len = static_header_size
if height >= self.FORK_BLOCK_HEIGHT:
header_len += 4 # flags
return self._read_nbytes(header_len)
class DeserializerGroestlcoin(DeserializerSegWit):
TX_HASH_FN = staticmethod(sha256)
class TxInputTokenPay(TxInput):
"""Class representing a TokenPay transaction input."""
OP_ANON_MARKER = 0xb9
# 2byte marker (cpubkey + sigc + sigr)
MIN_ANON_IN_SIZE = 2 + (33 + 32 + 32)
def _is_anon_input(self):
return (len(self.script) >= self.MIN_ANON_IN_SIZE and
self.script[0] == OpCodes.OP_RETURN and
self.script[1] == self.OP_ANON_MARKER)
def is_generation(self):
# Transactions coming in from stealth addresses are seen by
# the blockchain as newly minted coins. The reverse, where coins
# are sent TO a stealth address, are seen by the blockchain as
# a coin burn.
if self._is_anon_input():
return True
return super().is_generation()
class TxInputTokenPayStealth(
namedtuple("TxInput", "keyimage ringsize script sequence")):
"""Class representing a TokenPay stealth transaction input."""
def __str__(self):
script = self.script.hex()
keyimage = bytes(self.keyimage).hex()
return (f"Input({keyimage}, {self.ringsize[1]:d}, script={script}, sequence={self.sequence:d})")
def is_generation(self):
return True
def serialize(self):
return b''.join((
self.keyimage,
self.ringsize,
pack_varbytes(self.script),
pack_le_uint32(self.sequence),
))
class DeserializerTokenPay(DeserializerTxTime):
def _read_input(self):
txin = TxInputTokenPay(
self._read_nbytes(32), # prev_hash
self._read_le_uint32(), # prev_idx
self._read_varbytes(), # script
self._read_le_uint32(), # sequence
)
if txin._is_anon_input():
# Not sure if this is actually needed, and seems
# extra work for no immediate benefit, but it at
# least correctly represents a stealth input
raw = txin.serialize()
deserializer = Deserializer(raw)
txin = TxInputTokenPayStealth(
deserializer._read_nbytes(33), # keyimage
deserializer._read_nbytes(3), # ringsize
deserializer._read_varbytes(), # script
deserializer._read_le_uint32() # sequence
)
return txin
# Decred
class TxInputDcr(namedtuple("TxInput", "prev_hash prev_idx tree sequence")):
"""Class representing a Decred transaction input."""
def __str__(self):
prev_hash = hash_to_hex_str(self.prev_hash)
return (f"Input({prev_hash}, {self.prev_idx:d}, tree={self.tree}, sequence={self.sequence:d})")
def is_generation(self):
"""Test if an input is generation/coinbase like"""
return self.prev_idx == MINUS_1 and self.prev_hash == ZERO
class TxOutputDcr(namedtuple("TxOutput", "value version pk_script")):
"""Class representing a Decred transaction output."""
pass
class TxDcr(namedtuple("Tx", "version inputs outputs locktime expiry "
"witness")):
"""Class representing a Decred transaction."""
class DeserializerDecred(Deserializer):
@staticmethod
def blake256(data):
from blake256.blake256 import blake_hash
return blake_hash(data)
@staticmethod
def blake256d(data):
from blake256.blake256 import blake_hash
return blake_hash(blake_hash(data))
def read_tx(self):
return self._read_tx_parts(produce_hash=False)[0]
def read_tx_and_hash(self):
tx, tx_hash, vsize = self._read_tx_parts()
return tx, tx_hash
def read_tx_and_vsize(self):
tx, tx_hash, vsize = self._read_tx_parts(produce_hash=False)
return tx, vsize
def read_tx_block(self):
"""Returns a list of (deserialized_tx, tx_hash) pairs."""
read = self.read_tx_and_hash
txs = [read() for _ in range(self._read_varint())]
stxs = [read() for _ in range(self._read_varint())]
return txs + stxs
def read_tx_tree(self):
"""Returns a list of deserialized_tx without tx hashes."""
read_tx = self.read_tx
return [read_tx() for _ in range(self._read_varint())]
def _read_input(self):
return TxInputDcr(
self._read_nbytes(32), # prev_hash
self._read_le_uint32(), # prev_idx
self._read_byte(), # tree
self._read_le_uint32(), # sequence
)
def _read_output(self):
return TxOutputDcr(
self._read_le_int64(), # value
self._read_le_uint16(), # version
self._read_varbytes(), # pk_script
)
def _read_witness(self, fields):
read_witness_field = self._read_witness_field
assert fields == self._read_varint()
return [read_witness_field() for _ in range(fields)]
def _read_witness_field(self):
value_in = self._read_le_int64()
block_height = self._read_le_uint32()
block_index = self._read_le_uint32()
script = self._read_varbytes()
return value_in, block_height, block_index, script
def _read_tx_parts(self, produce_hash=True):
start = self.cursor
version = self._read_le_int32()
inputs = self._read_inputs()
outputs = self._read_outputs()
locktime = self._read_le_uint32()
expiry = self._read_le_uint32()
end_prefix = self.cursor
witness = self._read_witness(len(inputs))
if produce_hash:
# TxSerializeNoWitness << 16 == 0x10000
no_witness_header = pack_le_uint32(0x10000 | (version & 0xffff))
prefix_tx = no_witness_header + self.binary[start+4:end_prefix]
tx_hash = self.blake256(prefix_tx)
else:
tx_hash = None
return TxDcr(
version,
inputs,
outputs,
locktime,
expiry,
witness
), tx_hash, self.cursor - start
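
The deserializer's _read_varint and the companion pack_varint (in util.py
further down) implement Bitcoin's variable-length integer encoding; a
self-contained round-trip sketch of the same rules:

from struct import pack, unpack_from

def pack_varint(n: int) -> bytes:
    if n < 253:
        return bytes([n])
    if n < 65536:
        return bytes([253]) + pack('<H', n)
    if n < 4294967296:
        return bytes([254]) + pack('<I', n)
    return bytes([255]) + pack('<Q', n)

def read_varint(buf: bytes, cursor: int = 0):
    n = buf[cursor]
    cursor += 1
    if n < 253:
        return n, cursor
    fmt, size = {253: ('<H', 2), 254: ('<I', 4), 255: ('<Q', 8)}[n]
    return unpack_from(fmt, buf, cursor)[0], cursor + size

for value in (0, 252, 253, 65535, 65536, 2 ** 32, 2 ** 63):
    decoded, _ = read_varint(pack_varint(value))
    assert decoded == value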


@@ -1,361 +0,0 @@
# Copyright (c) 2016-2017, Neil Booth
#
# All rights reserved.
#
# The MIT License (MIT)
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
# See the file "LICENCE" for information about the copyright
# and warranty status of this software.
"""Miscellaneous utility classes and functions."""
import array
import inspect
from ipaddress import ip_address
import logging
import re
import sys
from collections.abc import Container, Mapping
from struct import pack, Struct
# Logging utilities
class ConnectionLogger(logging.LoggerAdapter):
"""Prepends a connection identifier to a logging message."""
def process(self, msg, kwargs):
conn_id = self.extra.get('conn_id', 'unknown')
return f'[{conn_id}] {msg}', kwargs
class CompactFormatter(logging.Formatter):
"""Strips the module from the logger name to leave the class only."""
def format(self, record):
record.name = record.name.rpartition('.')[-1]
return super().format(record)
def make_logger(name, *, handler, level):
    """Return the root ElectrumX logger."""
    logger = logging.getLogger(name)
    logger.addHandler(handler)
    logger.setLevel(level)
    logger.propagate = False
    return logger
def class_logger(path, classname):
"""Return a hierarchical logger for a class."""
return logging.getLogger(path).getChild(classname)
# Method decorator. To be used for calculations that will always
# deliver the same result. The method cannot take any arguments
# and should be accessed as an attribute.
class cachedproperty:
def __init__(self, f):
self.f = f
def __get__(self, obj, type):
obj = obj or type
value = self.f(obj)
setattr(obj, self.f.__name__, value)
return value
def formatted_time(t, sep=' '):
"""Return a number of seconds as a string in days, hours, mins and
maybe secs."""
t = int(t)
fmts = (('{:d}d', 86400), ('{:02d}h', 3600), ('{:02d}m', 60))
parts = []
for fmt, n in fmts:
val = t // n
if parts or val:
parts.append(fmt.format(val))
t %= n
if len(parts) < 3:
parts.append(f'{t:02d}s')
return sep.join(parts)
def deep_getsizeof(obj):
"""Find the memory footprint of a Python object.
Based on code from code.tutsplus.com: http://goo.gl/fZ0DXK
This is a recursive function that drills down a Python object graph
like a dictionary holding nested dictionaries with lists of lists
and tuples and sets.
    The sys.getsizeof function does a shallow size only. It counts each
    object inside a container as a pointer, regardless of how big it
    really is.
"""
ids = set()
def size(o):
if id(o) in ids:
return 0
r = sys.getsizeof(o)
ids.add(id(o))
if isinstance(o, (str, bytes, bytearray, array.array)):
return r
if isinstance(o, Mapping):
return r + sum(size(k) + size(v) for k, v in o.items())
if isinstance(o, Container):
return r + sum(size(x) for x in o)
return r
return size(obj)
def subclasses(base_class, strict=True):
"""Return a list of subclasses of base_class in its module."""
def select(obj):
return (inspect.isclass(obj) and issubclass(obj, base_class) and
(not strict or obj != base_class))
pairs = inspect.getmembers(sys.modules[base_class.__module__], select)
return [pair[1] for pair in pairs]
def chunks(items, size):
"""Break up items, an iterable, into chunks of length size."""
for i in range(0, len(items), size):
yield items[i: i + size]
def resolve_limit(limit):
if limit is None:
return -1
assert isinstance(limit, int) and limit >= 0
return limit
def bytes_to_int(be_bytes):
"""Interprets a big-endian sequence of bytes as an integer"""
return int.from_bytes(be_bytes, 'big')
def int_to_bytes(value):
"""Converts an integer to a big-endian sequence of bytes"""
return value.to_bytes((value.bit_length() + 7) // 8, 'big')
def increment_byte_string(bs):
"""Return the lexicographically next byte string of the same length.
Return None if there is none (when the input is all 0xff bytes)."""
for n in range(1, len(bs) + 1):
if bs[-n] != 0xff:
return bs[:-n] + bytes([bs[-n] + 1]) + bytes(n - 1)
return None
class LogicalFile:
"""A logical binary file split across several separate files on disk."""
def __init__(self, prefix, digits, file_size):
digit_fmt = f'{{:0{digits:d}d}}'
self.filename_fmt = prefix + digit_fmt
self.file_size = file_size
def read(self, start, size=-1):
"""Read up to size bytes from the virtual file, starting at offset
start, and return them.
If size is -1 all bytes are read."""
parts = []
while size != 0:
try:
with self.open_file(start, False) as f:
part = f.read(size)
if not part:
break
except FileNotFoundError:
break
parts.append(part)
start += len(part)
if size > 0:
size -= len(part)
return b''.join(parts)
def write(self, start, b):
"""Write the bytes-like object, b, to the underlying virtual file."""
while b:
size = min(len(b), self.file_size - (start % self.file_size))
with self.open_file(start, True) as f:
f.write(b if size == len(b) else b[:size])
b = b[size:]
start += size
def open_file(self, start, create):
"""Open the virtual file and seek to start. Return a file handle.
Raise FileNotFoundError if the file does not exist and create
is False.
"""
file_num, offset = divmod(start, self.file_size)
filename = self.filename_fmt.format(file_num)
f = open_file(filename, create)
f.seek(offset)
return f
def open_file(filename, create=False):
"""Open the file name. Return its handle."""
try:
return open(filename, 'rb+')
except FileNotFoundError:
if create:
return open(filename, 'wb+')
raise
def open_truncate(filename):
"""Open the file name. Return its handle."""
return open(filename, 'wb+')
def address_string(address):
"""Return an address as a correctly formatted string."""
fmt = '{}:{:d}'
host, port = address
try:
host = ip_address(host)
except ValueError:
pass
else:
if host.version == 6:
fmt = '[{}]:{:d}'
return fmt.format(host, port)
# See http://stackoverflow.com/questions/2532053/validate-a-hostname-string
# Note underscores are valid in domain names, but strictly invalid in host
# names. We ignore that distinction.
SEGMENT_REGEX = re.compile("(?!-)[A-Z_\\d-]{1,63}(?<!-)$", re.IGNORECASE)
def is_valid_hostname(hostname):
if len(hostname) > 255:
return False
# strip exactly one dot from the right, if present
if hostname and hostname[-1] == ".":
hostname = hostname[:-1]
return all(SEGMENT_REGEX.match(x) for x in hostname.split("."))
def protocol_tuple(s):
"""Converts a protocol version number, such as "1.0" to a tuple (1, 0).
If the version number is bad, (0, ) indicating version 0 is returned."""
try:
return tuple(int(part) for part in s.split('.'))
except Exception:
return (0, )
def version_string(ptuple):
"""Convert a version tuple such as (1, 2) to "1.2".
There is always at least one dot, so (1, ) becomes "1.0"."""
while len(ptuple) < 2:
ptuple += (0, )
return '.'.join(str(p) for p in ptuple)
def protocol_version(client_req, min_tuple, max_tuple):
"""Given a client's protocol version string, return a pair of
protocol tuples:
(negotiated version, client min request)
If the request is unsupported, the negotiated protocol tuple is
None.
"""
if client_req is None:
client_min = client_max = min_tuple
else:
if isinstance(client_req, list) and len(client_req) == 2:
client_min, client_max = client_req
else:
client_min = client_max = client_req
client_min = protocol_tuple(client_min)
client_max = protocol_tuple(client_max)
result = min(client_max, max_tuple)
if result < max(client_min, min_tuple) or result == (0, ):
result = None
return result, client_min
struct_le_i = Struct('<i')
struct_le_q = Struct('<q')
struct_le_H = Struct('<H')
struct_le_I = Struct('<I')
struct_le_Q = Struct('<Q')
struct_be_H = Struct('>H')
struct_be_I = Struct('>I')
structB = Struct('B')
unpack_le_int32_from = struct_le_i.unpack_from
unpack_le_int64_from = struct_le_q.unpack_from
unpack_le_uint16_from = struct_le_H.unpack_from
unpack_le_uint32_from = struct_le_I.unpack_from
unpack_le_uint64_from = struct_le_Q.unpack_from
unpack_be_uint16_from = struct_be_H.unpack_from
unpack_be_uint32_from = struct_be_I.unpack_from
unpack_be_uint64 = lambda x: int.from_bytes(x, byteorder='big')
pack_le_int32 = struct_le_i.pack
pack_le_int64 = struct_le_q.pack
pack_le_uint16 = struct_le_H.pack
pack_le_uint32 = struct_le_I.pack
pack_be_uint64 = lambda x: x.to_bytes(8, byteorder='big')
pack_be_uint16 = lambda x: x.to_bytes(2, byteorder='big')
pack_be_uint32 = struct_be_I.pack
pack_byte = structB.pack
hex_to_bytes = bytes.fromhex
def pack_varint(n):
if n < 253:
return pack_byte(n)
if n < 65536:
return pack_byte(253) + pack_le_uint16(n)
if n < 4294967296:
return pack_byte(254) + pack_le_uint32(n)
return pack_byte(255) + pack_le_uint64(n)
def pack_varbytes(data):
return pack_varint(len(data)) + data
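
To make the negotiation rules in protocol_version concrete, a self-contained
example (verbatim copies of the two helpers above) using the PROTOCOL_MIN and
PROTOCOL_MAX bounds that appear in the next file:

def protocol_tuple(s):
    try:
        return tuple(int(part) for part in s.split('.'))
    except Exception:
        return (0, )

def protocol_version(client_req, min_tuple, max_tuple):
    if client_req is None:
        client_min = client_max = min_tuple
    else:
        if isinstance(client_req, list) and len(client_req) == 2:
            client_min, client_max = client_req
        else:
            client_min = client_max = client_req
        client_min = protocol_tuple(client_min)
        client_max = protocol_tuple(client_max)
    result = min(client_max, max_tuple)
    if result < max(client_min, min_tuple) or result == (0, ):
        result = None
    return result, client_min

# client supports 0.60 through 0.199; server offers 0.54 through 0.199
assert protocol_version(['0.60', '0.199'], (0, 54, 0), (0, 199, 0)) == ((0, 199), (0, 60))
# client stuck below the server minimum: no negotiated version
assert protocol_version('0.50', (0, 54, 0), (0, 199, 0)) == (None, (0, 50))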


@@ -1,3 +0,0 @@
# need this to avoid circular import
PROTOCOL_MIN = (0, 54, 0)
PROTOCOL_MAX = (0, 199, 0)


@@ -1,55 +0,0 @@
import asyncio
from weakref import WeakSet
from aiohttp.web import Application, AppRunner, WebSocketResponse, TCPSite
from aiohttp.http_websocket import WSMsgType, WSCloseCode
class AdminWebSocket:
def __init__(self, manager):
self.manager = manager
self.app = Application()
self.app['websockets'] = WeakSet()
self.app.router.add_get('/', self.on_connect)
self.app.on_shutdown.append(self.on_shutdown)
self.runner = AppRunner(self.app)
async def on_status(self, _):
if not self.app['websockets']:
return
self.send_message({
'type': 'status',
'height': self.manager.daemon.cached_height(),
})
def send_message(self, msg):
for web_socket in self.app['websockets']:
asyncio.create_task(web_socket.send_json(msg))
async def start(self):
await self.runner.setup()
await TCPSite(self.runner, self.manager.env.websocket_host, self.manager.env.websocket_port).start()
async def stop(self):
await self.runner.cleanup()
async def on_connect(self, request):
web_socket = WebSocketResponse()
await web_socket.prepare(request)
self.app['websockets'].add(web_socket)
try:
async for msg in web_socket:
if msg.type == WSMsgType.TEXT:
await self.on_status(None)
elif msg.type == WSMsgType.ERROR:
print('web socket connection closed with exception %s' %
web_socket.exception())
finally:
self.app['websockets'].discard(web_socket)
return web_socket
@staticmethod
async def on_shutdown(app):
for web_socket in set(app['websockets']):
await web_socket.close(code=WSCloseCode.GOING_AWAY, message='Server shutdown')
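
A minimal client sketch for the AdminWebSocket above (not part of the diff;
assumes aiohttp is installed and the server is reachable on localhost:8080,
whereas the real host and port come from env.websocket_host and
env.websocket_port): any text frame elicits a status message.

import asyncio
import aiohttp

async def get_status(url: str = 'http://localhost:8080/'):
    async with aiohttp.ClientSession() as session:
        async with session.ws_connect(url) as ws:
            await ws.send_str('status')     # any TEXT frame triggers on_status
            return await ws.receive_json()  # {'type': 'status', 'height': ...}

print(asyncio.run(get_status()))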


@@ -7,18 +7,18 @@ from typing import List, Iterable, Optional, Tuple
 from lbry.error import InsufficientFundsError
 from lbry.crypto.hash import hash160, sha256
 from lbry.crypto.base58 import Base58
-from lbry.schema.url import normalize_name
-from lbry.schema.claim import Claim
-from lbry.schema.base import Signable
-from lbry.schema.purchase import Purchase
-from lbry.schema.support import Support
+from scribe.schema.url import normalize_name
+from scribe.schema.claim import Claim
+from scribe.schema.base import Signable
+from scribe.schema.purchase import Purchase
+from scribe.schema.support import Support
+from scribe.schema.bip32 import PrivateKey, PublicKey
 from .script import InputScript, OutputScript
 from .constants import COIN, DUST, NULL_HASH32
 from .bcd_data_stream import BCDataStream
 from .hash import TXRef, TXRefImmutable
 from .util import ReadOnlyList
-from .bip32 import PrivateKey, PublicKey
 if typing.TYPE_CHECKING:
     from lbry.wallet.account import Account


@@ -4,7 +4,7 @@ from time import perf_counter
 import logging
 from typing import Optional, Tuple, NamedTuple
 from lbry.utils import LRUCache, is_valid_public_ipv4
-from lbry.schema.attrs import country_str_to_int, country_int_to_str
+from scribe.schema.attrs import country_str_to_int, country_int_to_str
 # from prometheus_client import Counter
@@ -110,6 +110,7 @@ class SPVServerStatusProtocol(asyncio.DatagramProtocol):
         self._min_delay = 1 / throttle_reqs_per_sec
         self._allow_localhost = allow_localhost
         self._allow_lan = allow_lan
+        self.closed = asyncio.Event()

     def update_cached_response(self):
         self._left_cache, self._right_cache = SPVPong.make_sans_source_address(
@@ -160,20 +161,25 @@ class SPVServerStatusProtocol(asyncio.DatagramProtocol):
     def connection_made(self, transport) -> None:
         self.transport = transport
+        self.closed.clear()

     def connection_lost(self, exc: Optional[Exception]) -> None:
         self.transport = None
+        self.closed.set()

-    def close(self):
+    async def close(self):
         if self.transport:
             self.transport.close()
+        await self.closed.wait()

 class StatusServer:
     def __init__(self):
+        1/0
         self._protocol: Optional[SPVServerStatusProtocol] = None

     async def start(self, height: int, tip: bytes, country: str, interface: str, port: int, allow_lan: bool = False):
+        1/0
         if self.is_running:
             return
         loop = asyncio.get_event_loop()
@@ -184,9 +190,9 @@ class StatusServer:
         await loop.create_datagram_endpoint(lambda: self._protocol, (interface, port))
         log.info("started udp status server on %s:%i", interface, port)

-    def stop(self):
+    async def stop(self):
         if self.is_running:
-            self._protocol.close()
+            await self._protocol.close()
             self._protocol = None

     @property
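
The change above turns close and stop into coroutines so shutdown can wait
for the datagram transport to actually go away: connection_lost sets an
asyncio.Event that the new async close awaits. The pattern in isolation
(runnable sketch; Proto is a hypothetical stand-in):

import asyncio
from typing import Optional

class Proto(asyncio.DatagramProtocol):
    def __init__(self):
        self.transport = None
        self.closed = asyncio.Event()

    def connection_made(self, transport) -> None:
        self.transport = transport
        self.closed.clear()

    def connection_lost(self, exc: Optional[Exception]) -> None:
        self.transport = None
        self.closed.set()

    async def close(self):
        if self.transport:
            self.transport.close()
        await self.closed.wait()

async def main():
    loop = asyncio.get_running_loop()
    _, proto = await loop.create_datagram_endpoint(Proto, local_addr=('127.0.0.1', 0))
    await proto.close()  # returns only once connection_lost has fired

asyncio.run(main())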


@@ -40,22 +40,17 @@ def checkrecord(record, expected_winner, expected_claim):
 async def checkcontrolling(daemon: Daemon, db: SQLDB):
-    records, claim_ids, names, futs = [], [], [], []
+    records, names, futs = [], [], []
     for record in db.get_claims('claimtrie.claim_hash as is_controlling, claim.*', is_controlling=True):
         records.append(record)
         claim_id = hex_reverted(record['claim_hash'])
-        claim_ids.append((claim_id,))
-        names.append((record['normalized'],))
+        names.append((record['normalized'], (claim_id,), "", True))  # last parameter is IncludeValues
         if len(names) > 50000:
-            futs.append(daemon._send_vector('getvalueforname', names[:]))
-            futs.append(daemon._send_vector('getclaimbyid', claim_ids[:]))
+            futs.append(daemon._send_vector('getclaimsfornamebyid', names))
             names.clear()
-            claim_ids.clear()
     if names:
-        futs.append(daemon._send_vector('getvalueforname', names[:]))
-        futs.append(daemon._send_vector('getclaimbyid', claim_ids[:]))
+        futs.append(daemon._send_vector('getclaimsfornamebyid', names))
         names.clear()
-        claim_ids.clear()
     while futs:
         winners, claims = futs.pop(0), futs.pop(0)
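
The loop above accumulates request tuples and flushes a vectorised RPC call
whenever the batch crosses a threshold (50000 names there; 3 in this
miniature). A self-contained sketch of the same pattern, with a stand-in
send_vector in place of the real daemon._send_vector:

def flush_batches(items, batch_size=3):
    def send_vector(method, params):
        return method, list(params)  # stand-in: copy and record the call
    futs, batch = [], []
    for item in items:
        batch.append(item)
        if len(batch) > batch_size:
            futs.append(send_vector('getclaimsfornamebyid', batch))
            batch.clear()
    if batch:
        futs.append(send_vector('getclaimsfornamebyid', batch))
        batch.clear()
    return futs

calls = flush_batches(list(range(7)))
assert [len(params) for _, params in calls] == [4, 3]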


@@ -1,12 +1,12 @@
 #!/bin/bash

-SNAPSHOT_HEIGHT="1049658"
+SNAPSHOT_HEIGHT="1072108"

 HUB_VOLUME_PATH="/var/lib/docker/volumes/${USER}_wallet_server"
 ES_VOLUME_PATH="/var/lib/docker/volumes/${USER}_es01"

-SNAPSHOT_TAR_NAME="wallet_server_snapshot_${SNAPSHOT_HEIGHT}.tar"
-ES_SNAPSHOT_TAR_NAME="es_snapshot_${SNAPSHOT_HEIGHT}.tar"
+SNAPSHOT_TAR_NAME="wallet_server_snapshot_${SNAPSHOT_HEIGHT}.tar.gz"
+ES_SNAPSHOT_TAR_NAME="es_snapshot_${SNAPSHOT_HEIGHT}.tar.gz"

 SNAPSHOT_URL="https://snapshots.lbry.com/hub/${SNAPSHOT_TAR_NAME}"
 ES_SNAPSHOT_URL="https://snapshots.lbry.com/hub/${ES_SNAPSHOT_TAR_NAME}"


@@ -7,9 +7,11 @@ BASE = os.path.dirname(__file__)
 with open(os.path.join(BASE, 'README.md'), encoding='utf-8') as fh:
     long_description = fh.read()

-PLYVEL = []
-if sys.platform.startswith('linux'):
-    PLYVEL.append('plyvel==1.3.0')
+ROCKSDB = []
+if sys.platform.startswith('linux') or sys.platform.startswith('darwin'):
+    ROCKSDB.append('lbry-rocksdb==0.8.2')

 setup(
     name=__name__,
@@ -28,9 +30,7 @@ setup(
     entry_points={
         'console_scripts': [
             'lbrynet=lbry.extras.cli:main',
-            'lbry-hub=lbry.wallet.server.cli:main',
-            'orchstr8=lbry.wallet.orchstr8.cli:main',
-            'lbry-hub-elastic-sync=lbry.wallet.server.db.elasticsearch.sync:run_elastic_sync'
+            'orchstr8=lbry.wallet.orchstr8.cli:main'
         ],
     },
     install_requires=[
@@ -58,7 +58,7 @@ setup(
         'elasticsearch==7.10.1',
         'grpcio==1.38.0',
         'filetype==1.0.9'
-    ] + PLYVEL,
+    ] + ROCKSDB,
     extras_require={
         'torrent': ['lbry-libtorrent'],
         'lint': ['pylint==2.10.0'],

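The dependency switch keeps the same conditional pattern: the extra list is empty on unsupported platforms and concatenated into install_requires. An equivalent route would be a PEP 508 environment marker, sketched here (the project itself uses the list approach above):

# a sketch, not the project's setup.py
install_requires = [
    'elasticsearch==7.10.1',
    'lbry-rocksdb==0.8.2; sys_platform == "linux" or sys_platform == "darwin"',
]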
View file

@@ -103,7 +103,7 @@ class AccountManagement(CommandTestCase):
         second_account = await self.daemon.jsonrpc_account_create('second account')

         tx = await self.daemon.jsonrpc_account_send(
-            '0.05', await self.daemon.jsonrpc_address_unused(account_id=second_account.id)
+            '0.05', await self.daemon.jsonrpc_address_unused(account_id=second_account.id), blocking=True
         )
         await self.confirm_tx(tx.id)
         await self.assertOutputAmount(['0.05', '9.949876'], utxo_list())

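blocking=True shows up throughout this compare: send-style jsonrpc calls now wait for the transaction to be fully processed before returning, so the confirm_tx call that follows cannot race the broadcast. A toy illustration of why the blocking form composes safely:

import asyncio

async def send(accepted: asyncio.Event, blocking: bool) -> None:
    # pretend the node accepts the tx a moment after we hand it over
    asyncio.get_running_loop().call_later(0.05, accepted.set)
    if blocking:
        await accepted.wait()  # do not return until the tx is really there

async def main():
    accepted = asyncio.Event()
    await send(accepted, blocking=True)
    assert accepted.is_set()  # safe to confirm immediately, no race

asyncio.run(main())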
View file

@@ -9,7 +9,7 @@ class BlockchainReorganizationTests(CommandTestCase):
     VERBOSITY = logging.WARN

     async def assertBlockHash(self, height):
-        bp = self.conductor.spv_node.server.bp
+        bp = self.conductor.spv_node.writer

         def get_txids():
             return [
@@ -29,15 +29,16 @@ class BlockchainReorganizationTests(CommandTestCase):
         self.assertListEqual(block_txs, list(txs.keys()), msg='leveldb/lbrycrd transactions are of order')

     async def test_reorg(self):
-        bp = self.conductor.spv_node.server.bp
+        bp = self.conductor.spv_node.writer
         bp.reorg_count_metric.set(0)
         # invalidate current block, move forward 2
         height = 206
         self.assertEqual(self.ledger.headers.height, height)
         await self.assertBlockHash(height)
-        await self.blockchain.invalidate_block((await self.ledger.headers.hash(206)).decode())
+        block_hash = (await self.ledger.headers.hash(206)).decode()
+        await self.blockchain.invalidate_block(block_hash)
         await self.blockchain.generate(2)
-        await self.ledger.on_header.where(lambda e: e.height == 207)
+        await asyncio.wait_for(self.on_header(207), 3.0)
         self.assertEqual(self.ledger.headers.height, 207)
         await self.assertBlockHash(206)
         await self.assertBlockHash(207)
@@ -46,14 +47,14 @@ class BlockchainReorganizationTests(CommandTestCase):
         # invalidate current block, move forward 3
         await self.blockchain.invalidate_block((await self.ledger.headers.hash(206)).decode())
         await self.blockchain.generate(3)
-        await self.ledger.on_header.where(lambda e: e.height == 208)
+        await asyncio.wait_for(self.on_header(208), 3.0)
         self.assertEqual(self.ledger.headers.height, 208)
         await self.assertBlockHash(206)
         await self.assertBlockHash(207)
         await self.assertBlockHash(208)
         self.assertEqual(2, bp.reorg_count_metric._samples()[0][2])
         await self.blockchain.generate(3)
-        await self.ledger.on_header.where(lambda e: e.height == 211)
+        await asyncio.wait_for(self.on_header(211), 3.0)
         await self.assertBlockHash(209)
         await self.assertBlockHash(210)
         await self.assertBlockHash(211)
@@ -62,7 +63,7 @@ class BlockchainReorganizationTests(CommandTestCase):
         )
         await self.ledger.wait(still_valid)
         await self.blockchain.generate(1)
-        await self.ledger.on_header.where(lambda e: e.height == 212)
+        await asyncio.wait_for(self.on_header(212), 1.0)
         claim_id = still_valid.outputs[0].claim_id
         c1 = (await self.resolve(f'still-valid#{claim_id}'))['claim_id']
         c2 = (await self.resolve(f'still-valid#{claim_id[:2]}'))['claim_id']
@@ -71,7 +72,7 @@ class BlockchainReorganizationTests(CommandTestCase):
         abandon_tx = await self.daemon.jsonrpc_stream_abandon(claim_id=claim_id)
         await self.blockchain.generate(1)
-        await self.ledger.on_header.where(lambda e: e.height == 213)
+        await asyncio.wait_for(self.on_header(213), 1.0)
         c1 = await self.resolve(f'still-valid#{still_valid.outputs[0].claim_id}')
         c2 = await self.daemon.jsonrpc_resolve([f'still-valid#{claim_id[:2]}'])
         c3 = await self.daemon.jsonrpc_resolve([f'still-valid'])
@@ -112,11 +113,10 @@ class BlockchainReorganizationTests(CommandTestCase):
         # reorg the last block dropping our claim tx
         await self.blockchain.invalidate_block(invalidated_block_hash)
-        await self.blockchain.clear_mempool()
+        await self.conductor.clear_mempool()
         await self.blockchain.generate(2)
-        # wait for the client to catch up and verify the reorg
         await asyncio.wait_for(self.on_header(209), 3.0)
         await self.assertBlockHash(207)
         await self.assertBlockHash(208)
         await self.assertBlockHash(209)
@@ -142,9 +142,8 @@ class BlockchainReorganizationTests(CommandTestCase):
         # broadcast the claim in a different block
         new_txid = await self.blockchain.sendrawtransaction(hexlify(broadcast_tx.raw).decode())
         self.assertEqual(broadcast_tx.id, new_txid)
-        await self.blockchain.generate(1)
-        # wait for the client to catch up
+        await self.blockchain.generate(1)
         await asyncio.wait_for(self.on_header(210), 1.0)

         # verify the claim is in the new block and that it is returned by claim_search
@@ -191,7 +190,7 @@ class BlockchainReorganizationTests(CommandTestCase):
         # reorg the last block dropping our claim tx
         await self.blockchain.invalidate_block(invalidated_block_hash)
-        await self.blockchain.clear_mempool()
+        await self.conductor.clear_mempool()
         await self.blockchain.generate(2)

         # wait for the client to catch up and verify the reorg
@@ -222,8 +221,6 @@ class BlockchainReorganizationTests(CommandTestCase):
         new_txid = await self.blockchain.sendrawtransaction(hexlify(broadcast_tx.raw).decode())
         self.assertEqual(broadcast_tx.id, new_txid)
         await self.blockchain.generate(1)
-        # wait for the client to catch up
         await asyncio.wait_for(self.on_header(210), 1.0)

         # verify the claim is in the new block and that it is returned by claim_search

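The header waits above all moved from an unbounded ledger.on_header.where(...) to asyncio.wait_for(self.on_header(height), timeout), so a missed notification fails the test within seconds instead of hanging the suite. The pattern in isolation, with a toy event standing in for the ledger:

import asyncio

async def wait_for_height(height_reached: asyncio.Event, timeout: float) -> None:
    # raises asyncio.TimeoutError if the header never arrives
    await asyncio.wait_for(height_reached.wait(), timeout)

async def main():
    reached = asyncio.Event()
    asyncio.get_running_loop().call_later(0.1, reached.set)
    await wait_for_height(reached, 3.0)
    print("caught up to the reorged chain tip")

asyncio.run(main())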
View file

@@ -1,13 +1,16 @@
 import asyncio
+import scribe
+import lbry
 from unittest.mock import Mock
+from scribe.blockchain.network import LBCRegTest
+from scribe.hub.udp import StatusServer
+from scribe.hub.session import LBRYElectrumX
 from lbry.wallet.network import Network
 from lbry.wallet.orchstr8 import Conductor
 from lbry.wallet.orchstr8.node import SPVNode
 from lbry.wallet.rpc import RPCSession
-from lbry.wallet.server.udp import StatusServer
 from lbry.testcase import IntegrationTestCase, AsyncioTestCase
 from lbry.conf import Config
@@ -22,7 +25,7 @@ class NetworkTests(IntegrationTestCase):
     async def test_server_features(self):
         self.assertDictEqual({
-            'genesis_hash': self.conductor.spv_node.coin_class.GENESIS_HASH,
+            'genesis_hash': LBCRegTest.GENESIS_HASH,
             'hash_function': 'sha256',
             'hosts': {},
             'protocol_max': '0.199.0',
@@ -32,22 +35,27 @@ class NetworkTests(IntegrationTestCase):
             'payment_address': '',
             'donation_address': '',
             'daily_fee': '0',
-            'server_version': lbry.__version__,
+            'server_version': scribe.__version__,
             'trending_algorithm': 'fast_ar',
         }, await self.ledger.network.get_server_features())
         # await self.conductor.spv_node.stop()
         payment_address, donation_address = await self.account.get_addresses(limit=2)
+        original_address = self.conductor.spv_node.server.env.payment_address
+        original_donation_address = self.conductor.spv_node.server.env.donation_address
+        original_description = self.conductor.spv_node.server.env.description
+        original_daily_fee = self.conductor.spv_node.server.env.daily_fee
         self.conductor.spv_node.server.env.payment_address = payment_address
         self.conductor.spv_node.server.env.donation_address = donation_address
         self.conductor.spv_node.server.env.description = 'Fastest server in the west.'
         self.conductor.spv_node.server.env.daily_fee = '42'
-        from lbry.wallet.server.session import LBRYElectrumX
         LBRYElectrumX.set_server_features(self.conductor.spv_node.server.env)
         # await self.ledger.network.on_connected.first
         self.assertDictEqual({
-            'genesis_hash': self.conductor.spv_node.coin_class.GENESIS_HASH,
+            'genesis_hash': LBCRegTest.GENESIS_HASH,
             'hash_function': 'sha256',
             'hosts': {},
             'protocol_max': '0.199.0',
@@ -57,16 +65,23 @@ class NetworkTests(IntegrationTestCase):
             'payment_address': payment_address,
             'donation_address': donation_address,
             'daily_fee': '42',
-            'server_version': lbry.__version__,
+            'server_version': scribe.__version__,
             'trending_algorithm': 'fast_ar',
         }, await self.ledger.network.get_server_features())
+        # cleanup the changes since the attributes are set on the class
+        self.conductor.spv_node.server.env.payment_address = original_address
+        self.conductor.spv_node.server.env.donation_address = original_donation_address
+        self.conductor.spv_node.server.env.description = original_description
+        self.conductor.spv_node.server.env.daily_fee = original_daily_fee
+        LBRYElectrumX.set_server_features(self.conductor.spv_node.server.env)

 class ReconnectTests(IntegrationTestCase):

     async def test_multiple_servers(self):
         # we have a secondary node that connects later, so
-        node2 = SPVNode(self.conductor.spv_module, node_number=2)
+        node2 = SPVNode(node_number=2)
         await node2.start(self.blockchain)

         self.ledger.network.config['explicit_servers'].append((node2.hostname, node2.port))
@@ -86,7 +101,7 @@ class ReconnectTests(IntegrationTestCase):
         await self.ledger.stop()
         initial_height = self.ledger.local_height_including_downloaded_height
         await self.blockchain.generate(100)
-        while self.conductor.spv_node.server.session_mgr.notified_height < initial_height + 99:  # off by 1
+        while self.conductor.spv_node.server.session_manager.notified_height < initial_height + 99:  # off by 1
             await asyncio.sleep(0.1)
         self.assertEqual(initial_height, self.ledger.local_height_including_downloaded_height)
         await self.ledger.headers.open()
@@ -101,12 +116,7 @@ class ReconnectTests(IntegrationTestCase):
         self.ledger.network.client.transport.close()
         self.assertFalse(self.ledger.network.is_connected)
         await self.ledger.resolve([], 'derp')
-        sendtxid = await self.blockchain.send_to_address(address1, 1.1337)
-        # await self.ledger.resolve([], 'derp')
-        # self.assertTrue(self.ledger.network.is_connected)
-        await asyncio.wait_for(self.on_transaction_id(sendtxid), 10.0)  # mempool
-        await self.blockchain.generate(1)
-        await self.on_transaction_id(sendtxid)  # confirmed
+        sendtxid = await self.send_to_address_and_wait(address1, 1.1337, 1)
         self.assertLess(self.ledger.network.client.response_time, 1)  # response time properly set lower, we are fine

         await self.assertBalance(self.account, '1.1337')
@@ -135,7 +145,7 @@ class ReconnectTests(IntegrationTestCase):
         await self.conductor.spv_node.stop()
         self.assertFalse(self.ledger.network.is_connected)
         await asyncio.sleep(0.2)  # let it retry and fail once
-        await self.conductor.spv_node.start(self.conductor.blockchain_node)
+        await self.conductor.spv_node.start(self.conductor.lbcwallet_node)
         await self.ledger.network.on_connected.first
         self.assertTrue(self.ledger.network.is_connected)
@@ -161,15 +171,16 @@ class ReconnectTests(IntegrationTestCase):

 class UDPServerFailDiscoveryTest(AsyncioTestCase):

     async def test_wallet_connects_despite_lack_of_udp(self):
         conductor = Conductor()
         conductor.spv_node.udp_port = '0'
-        await conductor.start_blockchain()
-        self.addCleanup(conductor.stop_blockchain)
+        await conductor.start_lbcd()
+        self.addCleanup(conductor.stop_lbcd)
+        await conductor.start_lbcwallet()
+        self.addCleanup(conductor.stop_lbcwallet)
         await conductor.start_spv()
         self.addCleanup(conductor.stop_spv)
-        self.assertFalse(conductor.spv_node.server.bp.status_server.is_running)
+        self.assertFalse(conductor.spv_node.server.status_server.is_running)
         await asyncio.wait_for(conductor.start_wallet(), timeout=5)
         self.addCleanup(conductor.stop_wallet)
         self.assertTrue(conductor.wallet_node.ledger.network.is_connected)

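The save-and-restore block added to test_server_features exists because LBRYElectrumX.set_server_features() stores the values on the class, so without the restore every later test in the process would see the mutated payment address and fee. The generic shape of that fix, as a runnable toy:

class Features:
    banner = "default"

    @classmethod
    def set_banner(cls, banner: str) -> None:
        cls.banner = banner  # class-level state, shared by all instances

original = Features.banner
Features.set_banner("Fastest server in the west.")
assert Features.banner == "Fastest server in the west."
Features.set_banner(original)  # restore so unrelated tests are unaffected
assert Features.banner == "default"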
View file

@@ -1,6 +1,6 @@
 from typing import Optional
 from lbry.testcase import CommandTestCase
-from lbry.schema.purchase import Purchase
+from scribe.schema.purchase import Purchase
 from lbry.wallet.transaction import Transaction
 from lbry.wallet.dewies import lbc_to_dewies, dewies_to_lbc
@@ -103,7 +103,7 @@ class PurchaseCommandTests(CommandTestCase):
         # purchase non-existent claim fails
         with self.assertRaisesRegex(Exception, "Could not find claim with claim_id"):
-            await self.daemon.jsonrpc_purchase_create('abc123')
+            await self.daemon.jsonrpc_purchase_create('aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa')

         # purchase stream with no price fails
         no_price_stream = await self.priced_stream('no_price_stream', price=None)
@@ -174,8 +174,7 @@ class PurchaseCommandTests(CommandTestCase):
         self.merchant_address = await self.account.receiving.get_or_create_usable_address()
         daemon2 = await self.add_daemon()
         address2 = await daemon2.wallet_manager.default_account.receiving.get_or_create_usable_address()
-        sendtxid = await self.blockchain.send_to_address(address2, 2)
-        await self.confirm_tx(sendtxid, daemon2.ledger)
+        await self.send_to_address_and_wait(address2, 2, 1, ledger=daemon2.ledger)

         stream = await self.priced_stream('a', '1.0')
         await self.assertBalance(self.account, '9.987893')

View file

@@ -63,7 +63,7 @@ class SyncTests(IntegrationTestCase):
         await self.assertBalance(account1, '1.0')
         await self.assertBalance(account2, '1.0')

-        await self.blockchain.generate(1)
+        await self.generate(1)

         # pay 0.01 from main node to receiving node, would have increased change addresses
         address0 = (await account0.receiving.get_addresses())[0]
@@ -79,7 +79,7 @@ class SyncTests(IntegrationTestCase):
             account1.ledger.wait(tx),
             account2.ledger.wait(tx),
         ])
-        await self.blockchain.generate(1)
+        await self.generate(1)
         await asyncio.wait([
             account0.ledger.wait(tx),
             account1.ledger.wait(tx),
@@ -92,7 +92,7 @@ class SyncTests(IntegrationTestCase):
         await self.assertBalance(account1, '0.989876')
         await self.assertBalance(account2, '0.989876')

-        await self.blockchain.generate(1)
+        await self.generate(1)

         # create a new mirror node and see if it syncs to same balance from scratch
         node3 = await self.make_wallet_node(account1.seed)

View file

@@ -11,7 +11,7 @@ from lbry.wallet.dewies import dict_values_to_lbc
 class WalletCommands(CommandTestCase):

     async def test_wallet_create_and_add_subscribe(self):
-        session = next(iter(self.conductor.spv_node.server.session_mgr.sessions.values()))
+        session = next(iter(self.conductor.spv_node.server.session_manager.sessions.values()))
         self.assertEqual(len(session.hashX_subs), 27)
         wallet = await self.daemon.jsonrpc_wallet_create('foo', create_account=True, single_key=True)
         self.assertEqual(len(session.hashX_subs), 28)
@@ -23,7 +23,7 @@ class WalletCommands(CommandTestCase):
     async def test_wallet_syncing_status(self):
         address = await self.daemon.jsonrpc_address_unused()
         self.assertFalse(self.daemon.jsonrpc_wallet_status()['is_syncing'])
-        await self.blockchain.send_to_address(address, 1)
+        await self.send_to_address_and_wait(address, 1)
         await self.ledger._update_tasks.started.wait()
         self.assertTrue(self.daemon.jsonrpc_wallet_status()['is_syncing'])
         await self.ledger._update_tasks.done.wait()
@@ -47,9 +47,9 @@ class WalletCommands(CommandTestCase):
         status = await self.daemon.jsonrpc_status()
         self.assertEqual(len(status['wallet']['servers']), 1)
         self.assertEqual(status['wallet']['servers'][0]['port'], 50002)
-        await self.conductor.spv_node.stop(True)
+        await self.conductor.spv_node.stop()
         self.conductor.spv_node.port = 54320
-        await self.conductor.spv_node.start(self.conductor.blockchain_node)
+        await self.conductor.spv_node.start(self.conductor.lbcwallet_node)
         status = await self.daemon.jsonrpc_status()
         self.assertEqual(len(status['wallet']['servers']), 0)
         self.daemon.jsonrpc_settings_set('lbryum_servers', ['localhost:54320'])
@@ -59,23 +59,22 @@ class WalletCommands(CommandTestCase):
         self.assertEqual(status['wallet']['servers'][0]['port'], 54320)

     async def test_sending_to_scripthash_address(self):
-        self.assertEqual(await self.blockchain.get_balance(), '95.99973580')
+        bal = await self.blockchain.get_balance()
         await self.assertBalance(self.account, '10.0')
         p2sh_address1 = await self.blockchain.get_new_address(self.blockchain.P2SH_SEGWIT_ADDRESS)
         tx = await self.account_send('2.0', p2sh_address1)
         self.assertEqual(tx['outputs'][0]['address'], p2sh_address1)
-        self.assertEqual(await self.blockchain.get_balance(), '98.99973580')  # +1 lbc for confirm block
+        self.assertEqual(await self.blockchain.get_balance(), str(float(bal)+3))  # +1 lbc for confirm block
         await self.assertBalance(self.account, '7.999877')
         await self.wallet_send('3.0', p2sh_address1)
-        self.assertEqual(await self.blockchain.get_balance(), '102.99973580')  # +1 lbc for confirm block
+        self.assertEqual(await self.blockchain.get_balance(), str(float(bal)+7))  # +1 lbc for confirm block
         await self.assertBalance(self.account, '4.999754')

     async def test_balance_caching(self):
         account2 = await self.daemon.jsonrpc_account_create("Tip-er")
         address2 = await self.daemon.jsonrpc_address_unused(account2.id)
-        sendtxid = await self.blockchain.send_to_address(address2, 10)
-        await self.confirm_tx(sendtxid)
-        await self.generate(1)
+        await self.send_to_address_and_wait(address2, 10, 2)
+        await self.ledger.tasks_are_done()  # don't mess with the query count while we need it

         wallet_balance = self.daemon.jsonrpc_wallet_balance
         ledger = self.ledger
@@ -90,14 +89,16 @@ class WalletCommands(CommandTestCase):
         self.assertIsNone(ledger._balance_cache.get(self.account.id))

         query_count += 2
-        self.assertEqual(await wallet_balance(), expected)
+        balance = await wallet_balance()
         self.assertEqual(self.ledger.db.db.query_count, query_count)
+        self.assertEqual(balance, expected)
         self.assertEqual(dict_values_to_lbc(ledger._balance_cache.get(self.account.id))['total'], '10.0')
         self.assertEqual(dict_values_to_lbc(ledger._balance_cache.get(account2.id))['total'], '10.0')

         # calling again uses cache
-        self.assertEqual(await wallet_balance(), expected)
+        balance = await wallet_balance()
         self.assertEqual(self.ledger.db.db.query_count, query_count)
+        self.assertEqual(balance, expected)
         self.assertEqual(dict_values_to_lbc(ledger._balance_cache.get(self.account.id))['total'], '10.0')
         self.assertEqual(dict_values_to_lbc(ledger._balance_cache.get(account2.id))['total'], '10.0')
@@ -123,8 +124,7 @@ class WalletCommands(CommandTestCase):
         wallet2 = await self.daemon.jsonrpc_wallet_create('foo', create_account=True)
         account3 = wallet2.default_account
         address3 = await self.daemon.jsonrpc_address_unused(account3.id, wallet2.id)
-        await self.confirm_tx(await self.blockchain.send_to_address(address3, 1))
-        await self.generate(1)
+        await self.send_to_address_and_wait(address3, 1, 1)

         account_balance = self.daemon.jsonrpc_account_balance
         wallet_balance = self.daemon.jsonrpc_wallet_balance
@@ -154,7 +154,7 @@ class WalletCommands(CommandTestCase):
         address2 = await self.daemon.jsonrpc_address_unused(account2.id)

         # send lbc to someone else
-        tx = await self.daemon.jsonrpc_account_send('1.0', address2)
+        tx = await self.daemon.jsonrpc_account_send('1.0', address2, blocking=True)
         await self.confirm_tx(tx.id)
         self.assertEqual(await account_balance(), {
             'total': '8.97741',
@@ -187,7 +187,7 @@ class WalletCommands(CommandTestCase):
         })

         # tip claimed
-        tx = await self.daemon.jsonrpc_support_abandon(txid=support1['txid'], nout=0)
+        tx = await self.daemon.jsonrpc_support_abandon(txid=support1['txid'], nout=0, blocking=True)
         await self.confirm_tx(tx.id)
         self.assertEqual(await account_balance(), {
             'total': '9.277303',
@@ -238,8 +238,7 @@ class WalletEncryptionAndSynchronization(CommandTestCase):
             "carbon smart garage balance margin twelve"
         )
         address = (await self.daemon2.wallet_manager.default_account.receiving.get_addresses(limit=1, only_usable=True))[0]
-        sendtxid = await self.blockchain.send_to_address(address, 1)
-        await self.confirm_tx(sendtxid, self.daemon2.ledger)
+        await self.send_to_address_and_wait(address, 1, 1, ledger=self.daemon2.ledger)

     def assertWalletEncrypted(self, wallet_path, encrypted):
         with open(wallet_path) as opened:
@@ -294,7 +293,7 @@ class WalletEncryptionAndSynchronization(CommandTestCase):
             '3056301006072a8648ce3d020106052b8104000a034200049ae7283f3f6723e0a1'
             '66b7e19e1d1167f6dc5f4af61b4a58066a0d2a8bed2b35c66bccb4ec3eba316b16'
             'a97a6d6a4a8effd29d748901bb9789352519cd00b13d'
-        ), self.daemon2)
+        ), self.daemon2, blocking=True)
         await self.confirm_tx(channel['txid'], self.daemon2.ledger)

         # both daemons will have the channel but only one has the cert so far

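Several tests in this file replace the send_to_address / confirm_tx / generate dance with a single send_to_address_and_wait(address, amount, blocks_to_generate, ledger=...) helper. Its implementation lives in lbry.testcase and is not shown in this compare; a toy model of the behavior the call sites imply:

import asyncio

async def send_and_wait(send, generate, seen: asyncio.Event, confirmations: int) -> None:
    await send()                      # broadcast
    for _ in range(confirmations):    # optionally mine confirmation blocks
        await generate()
    await asyncio.wait_for(seen.wait(), 10.0)  # wait until the wallet saw the tx

async def main():
    seen = asyncio.Event()

    async def send():
        asyncio.get_running_loop().call_later(0.05, seen.set)

    async def generate():
        await asyncio.sleep(0)

    await send_and_wait(send, generate, seen, confirmations=1)
    print("transaction seen and confirmed")

asyncio.run(main())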
View file

@@ -1,12 +1,11 @@
 import asyncio
-import lbry
-import lbry.wallet
+import scribe
+from scribe.hub.session import LBRYElectrumX
 from lbry.error import ServerPaymentFeeAboveMaxAllowedError
 from lbry.wallet.network import ClientSession
 from lbry.wallet.rpc import RPCError
-from lbry.wallet.server.db.elasticsearch.sync import make_es_index_and_run_sync
-from lbry.wallet.server.session import LBRYElectrumX
 from lbry.testcase import IntegrationTestCase, CommandTestCase
 from lbry.wallet.orchstr8.node import SPVNode
@@ -25,17 +24,17 @@ class TestSessions(IntegrationTestCase):
         )
         await session.create_connection()
         await session.send_request('server.banner', ())
-        self.assertEqual(len(self.conductor.spv_node.server.session_mgr.sessions), 1)
+        self.assertEqual(len(self.conductor.spv_node.server.session_manager.sessions), 1)
         self.assertFalse(session.is_closing())
         await asyncio.sleep(1.1)
         with self.assertRaises(asyncio.TimeoutError):
             await session.send_request('server.banner', ())
         self.assertTrue(session.is_closing())
-        self.assertEqual(len(self.conductor.spv_node.server.session_mgr.sessions), 0)
+        self.assertEqual(len(self.conductor.spv_node.server.session_manager.sessions), 0)

     async def test_proper_version(self):
         info = await self.ledger.network.get_server_features()
-        self.assertEqual(lbry.__version__, info['server_version'])
+        self.assertEqual(scribe.__version__, info['server_version'])

     async def test_client_errors(self):
         # Goal is ensuring thsoe are raised and not trapped accidentally
@@ -46,7 +45,7 @@ class TestSessions(IntegrationTestCase):
 class TestUsagePayment(CommandTestCase):
-    async def _test_single_server_payment(self):
+    async def test_single_server_payment(self):
         wallet_pay_service = self.daemon.component_manager.get_component('wallet_server_payments')
         wallet_pay_service.payment_period = 1
         # only starts with a positive max key fee
@@ -63,8 +62,8 @@ class TestUsagePayment(CommandTestCase):
         _, history = await self.ledger.get_local_status_and_history(address)
         self.assertEqual(history, [])

-        node = SPVNode(self.conductor.spv_module, node_number=2)
-        await node.start(self.blockchain, extraconf={"PAYMENT_ADDRESS": address, "DAILY_FEE": "1.1"})
+        node = SPVNode(node_number=2)
+        await node.start(self.blockchain, extraconf={"payment_address": address, "daily_fee": "1.1"})
         self.addCleanup(node.stop)
         self.daemon.jsonrpc_settings_set('lbryum_servers', [f"{node.hostname}:{node.port}"])
         await self.daemon.jsonrpc_wallet_reconnect()
@@ -90,56 +89,78 @@ class TestUsagePayment(CommandTestCase):
 class TestESSync(CommandTestCase):
     async def test_es_sync_utility(self):
+        es_writer = self.conductor.spv_node.es_writer
+        server_search_client = self.conductor.spv_node.server.session_manager.search_index
+
         for i in range(10):
             await self.stream_create(f"stream{i}", bid='0.001')

         await self.generate(1)
         self.assertEqual(10, len(await self.claim_search(order_by=['height'])))
-        db = self.conductor.spv_node.server.db
-        env = self.conductor.spv_node.server.env
-        await db.search_index.delete_index()
-        db.search_index.clear_caches()
-        self.assertEqual(0, len(await self.claim_search(order_by=['height'])))
-        await db.search_index.stop()
-
-        async def resync():
-            await db.search_index.start()
-            db.search_index.clear_caches()
-            await make_es_index_and_run_sync(env, db=db, index_name=db.search_index.index, force=True)
-            self.assertEqual(10, len(await self.claim_search(order_by=['height'])))
+        # delete the index and verify nothing is returned by claim search
+        await es_writer.delete_index()
+        server_search_client.clear_caches()
         self.assertEqual(0, len(await self.claim_search(order_by=['height'])))
-        await resync()

-        # this time we will test a migration from unversioned to v1
-        await db.search_index.sync_client.indices.delete_template(db.search_index.index)
-        await db.search_index.stop()
-
-        await make_es_index_and_run_sync(env, db=db, index_name=db.search_index.index, force=True)
-        await db.search_index.start()
-
-        await resync()
+        # reindex, 10 claims should be returned
+        await es_writer.reindex(force=True)
         self.assertEqual(10, len(await self.claim_search(order_by=['height'])))
+        server_search_client.clear_caches()
+        self.assertEqual(10, len(await self.claim_search(order_by=['height'])))
+
+        # reindex again, this should not appear to do anything but will delete and reinsert the same 10 claims
+        await es_writer.reindex(force=True)
+        self.assertEqual(10, len(await self.claim_search(order_by=['height'])))
+        server_search_client.clear_caches()
+        self.assertEqual(10, len(await self.claim_search(order_by=['height'])))
+
+        # delete the index again and stop the writer, upon starting it the writer should reindex automatically
+        await es_writer.delete_index()
+        await es_writer.stop()
+        server_search_client.clear_caches()
+        self.assertEqual(0, len(await self.claim_search(order_by=['height'])))
+        await es_writer.start(reindex=True)
+        self.assertEqual(10, len(await self.claim_search(order_by=['height'])))
+
+        # stop the es writer and advance the chain by 1, adding a new claim. upon resuming the es writer, it should
+        # add the new claim
+        await es_writer.stop()
+        await self.stream_create(f"stream11", bid='0.001', confirm=False)
+        generate_block_task = asyncio.create_task(self.generate(1))
+        await es_writer.start()
+        await generate_block_task
+        self.assertEqual(11, len(await self.claim_search(order_by=['height'])))
+
+        # # this time we will test a migration from unversioned to v1
+        # await db.search_index.sync_client.indices.delete_template(db.search_index.index)
+        # await db.search_index.stop()
+        #
+        # await make_es_index_and_run_sync(env, db=db, index_name=db.search_index.index, force=True)
+        # await db.search_index.start()
+        #
+        # await es_writer.reindex()
+        # self.assertEqual(10, len(await self.claim_search(order_by=['height'])))

 class TestHubDiscovery(CommandTestCase):

     async def test_hub_discovery(self):
-        us_final_node = SPVNode(self.conductor.spv_module, node_number=2)
-        await us_final_node.start(self.blockchain, extraconf={"COUNTRY": "US"})
+        us_final_node = SPVNode(node_number=2)
+        await us_final_node.start(self.blockchain, extraconf={"country": "US"})
         self.addCleanup(us_final_node.stop)
         final_node_host = f"{us_final_node.hostname}:{us_final_node.port}"

-        kp_final_node = SPVNode(self.conductor.spv_module, node_number=3)
-        await kp_final_node.start(self.blockchain, extraconf={"COUNTRY": "KP"})
+        kp_final_node = SPVNode(node_number=3)
+        await kp_final_node.start(self.blockchain, extraconf={"country": "KP"})
         self.addCleanup(kp_final_node.stop)
         kp_final_node_host = f"{kp_final_node.hostname}:{kp_final_node.port}"

-        relay_node = SPVNode(self.conductor.spv_module, node_number=4)
+        relay_node = SPVNode(node_number=4)
         await relay_node.start(self.blockchain, extraconf={
-            "COUNTRY": "FR",
-            "PEER_HUBS": ",".join([kp_final_node_host, final_node_host])
+            "country": "FR",
+            "peer_hubs": ",".join([kp_final_node_host, final_node_host])
         })
         relay_node_host = f"{relay_node.hostname}:{relay_node.port}"
         self.addCleanup(relay_node.stop)
@@ -186,7 +207,7 @@ class TestHubDiscovery(CommandTestCase):
             self.daemon.ledger.network.client.server_address_and_port, ('127.0.0.1', kp_final_node.port)
         )

-        kp_final_node.server.session_mgr._notify_peer('127.0.0.1:9988')
+        kp_final_node.server.session_manager._notify_peer('127.0.0.1:9988')
         await self.daemon.ledger.network.on_hub.first
         await asyncio.sleep(0.5)  # wait for above event to be processed by other listeners
         self.assertEqual(

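TestESSync now drives scribe's es_writer (delete_index, reindex, and start/stop with reindex=True), but the lifecycle underneath is plain Elasticsearch. The same delete-and-recreate steps expressed against the pinned client (elasticsearch==7.10.1), assuming a local node and a hypothetical index name:

from elasticsearch import Elasticsearch

es = Elasticsearch("http://localhost:9200")
index = "claims"  # hypothetical; scribe manages its own index name

if es.indices.exists(index=index):
    es.indices.delete(index=index)   # roughly what es_writer.delete_index() does
es.indices.create(index=index)       # a reindex then repopulates it from the db
print(es.count(index=index)["count"])  # 0 until claims are synced back in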
View file

@@ -12,7 +12,6 @@ from lbry.error import InsufficientFundsError
 from lbry.extras.daemon.daemon import DEFAULT_PAGE_SIZE
 from lbry.testcase import CommandTestCase
 from lbry.wallet.orchstr8.node import SPVNode
-from lbry.wallet.server.db.common import STREAM_TYPES
 from lbry.wallet.transaction import Transaction, Output
 from lbry.wallet.util import satoshis_to_coins as lbc
 from lbry.crypto.hash import sha256
@@ -20,6 +19,16 @@ from lbry.crypto.hash import sha256
 log = logging.getLogger(__name__)

+STREAM_TYPES = {
+    'video': 1,
+    'audio': 2,
+    'image': 3,
+    'document': 4,
+    'binary': 5,
+    'model': 6,
+}
+

 def verify(channel, data, signature, channel_hash=None):
     pieces = [
         signature['signing_ts'].encode(),
@@ -125,18 +134,6 @@ class ClaimSearchCommand(ClaimTestCase):
             with self.assertRaises(ConnectionResetError):
                 await self.claim_search(claim_ids=claim_ids)

-    async def test_claim_search_as_reader_server(self):
-        node2 = SPVNode(self.conductor.spv_module, node_number=2)
-        current_prefix = self.conductor.spv_node.server.bp.env.es_index_prefix
-        await node2.start(self.blockchain, extraconf={'ES_MODE': 'reader', 'ES_INDEX_PREFIX': current_prefix})
-        self.addCleanup(node2.stop)
-        self.ledger.network.config['default_servers'] = [(node2.hostname, node2.port)]
-        await self.ledger.stop()
-        await self.ledger.start()
-        channel2 = await self.channel_create('@abc', '0.1', allow_duplicate_name=True)
-        await asyncio.sleep(1)  # fixme: find a way to block on the writer
-        await self.assertFindsClaims([channel2], name='@abc')
-
     async def test_basic_claim_search(self):
         await self.create_channel()
         channel_txo = self.channel['outputs'][0]
@@ -424,6 +421,17 @@ class ClaimSearchCommand(ClaimTestCase):
             limit_claims_per_channel=3, claim_type='stream'
         )

+    async def test_no_source_and_valid_channel_signature_and_media_type(self):
+        await self.channel_create('@spam2', '1.0')
+        await self.stream_create('barrrrrr', '1.0', channel_name='@spam2', file_path=self.video_file_name)
+        paradox_no_source_claims = await self.claim_search(has_no_source=True, valid_channel_signature=True,
+                                                           media_type="video/mp4")
+        mp4_claims = await self.claim_search(media_type="video/mp4")
+        no_source_claims = await self.claim_search(has_no_source=True, valid_channel_signature=True)
+        self.assertEqual(0, len(paradox_no_source_claims))
+        self.assertEqual(1, len(no_source_claims))
+        self.assertEqual(1, len(mp4_claims))
+
     async def test_no_duplicates(self):
         await self.generate(10)
         match = self.assertFindsClaims
@@ -494,8 +502,7 @@ class ClaimSearchCommand(ClaimTestCase):
         tx = await Transaction.claim_create(
             'unknown', b'{"sources":{"lbry_sd_hash":""}}', 1, address, [self.account], self.account)
         await tx.sign([self.account])
-        await self.broadcast(tx)
-        await self.confirm_tx(tx.id)
+        await self.broadcast_and_confirm(tx)

         octet = await self.stream_create()
         video = await self.stream_create('chrome', file_path=self.video_file_name)
@@ -1226,7 +1233,7 @@ class ChannelCommands(CommandTestCase):
         data_to_sign = "CAFEBABE"
         # claim new name
         await self.channel_create('@someotherchan')
-        channel_tx = await self.daemon.jsonrpc_channel_create('@signer', '0.1')
+        channel_tx = await self.daemon.jsonrpc_channel_create('@signer', '0.1', blocking=True)
         await self.confirm_tx(channel_tx.id)
         channel = channel_tx.outputs[0]
         signature1 = await self.out(self.daemon.jsonrpc_channel_sign(channel_name='@signer', hexdata=data_to_sign))
@@ -1373,7 +1380,7 @@ class StreamCommands(ClaimTestCase):
         self.assertEqual('8.989893', (await self.daemon.jsonrpc_account_balance())['available'])

         result = await self.out(self.daemon.jsonrpc_account_send(
-            '5.0', await self.daemon.jsonrpc_address_unused(account2_id)
+            '5.0', await self.daemon.jsonrpc_address_unused(account2_id), blocking=True
         ))
         await self.confirm_tx(result['txid'])
@@ -1514,10 +1521,13 @@ class StreamCommands(ClaimTestCase):
             await self.channel_create('@filtering', '0.1')
         )
         self.conductor.spv_node.server.db.filtering_channel_hashes.add(bytes.fromhex(filtering_channel_id))
-        self.assertEqual(0, len(self.conductor.spv_node.server.db.filtered_streams))
-        await self.stream_repost(bad_content_id, 'filter1', '0.1', channel_name='@filtering')
-        self.assertEqual(1, len(self.conductor.spv_node.server.db.filtered_streams))
+        self.conductor.spv_node.es_writer.db.filtering_channel_hashes.add(bytes.fromhex(filtering_channel_id))
+        self.assertEqual(0, len(self.conductor.spv_node.es_writer.db.filtered_streams))
+        await self.stream_repost(bad_content_id, 'filter1', '0.1', channel_name='@filtering')
+        self.assertEqual(1, len(self.conductor.spv_node.es_writer.db.filtered_streams))
+        self.assertEqual('0.1', (await self.out(self.daemon.jsonrpc_resolve('bad_content')))['bad_content']['amount'])

         # search for filtered content directly
         result = await self.out(self.daemon.jsonrpc_claim_search(name='bad_content'))
         blocked = result['blocked']
@@ -1560,14 +1570,14 @@ class StreamCommands(ClaimTestCase):
         )

         # test setting from env vars and starting from scratch
         await self.conductor.spv_node.stop(False)
-        await self.conductor.spv_node.start(self.conductor.blockchain_node,
-                                            extraconf={'BLOCKING_CHANNEL_IDS': blocking_channel_id,
-                                                       'FILTERING_CHANNEL_IDS': filtering_channel_id})
+        await self.conductor.spv_node.start(self.conductor.lbcwallet_node,
+                                            extraconf={'blocking_channel_ids': [blocking_channel_id],
+                                                       'filtering_channel_ids': [filtering_channel_id]})
         await self.daemon.wallet_manager.reset()

-        self.assertEqual(0, len(self.conductor.spv_node.server.db.blocked_streams))
+        self.assertEqual(0, len(self.conductor.spv_node.es_writer.db.blocked_streams))
         await self.stream_repost(bad_content_id, 'block1', '0.1', channel_name='@blocking')
-        self.assertEqual(1, len(self.conductor.spv_node.server.db.blocked_streams))
+        self.assertEqual(1, len(self.conductor.spv_node.es_writer.db.blocked_streams))

         # blocked content is not resolveable
         error = (await self.resolve('lbry://@some_channel/bad_content'))['error']
@@ -1626,6 +1636,11 @@ class StreamCommands(ClaimTestCase):
         self.assertEqual((await self.resolve('lbry://worse_content'))['error']['name'], 'BLOCKED')
         self.assertEqual((await self.resolve('lbry://@bad_channel/worse_content'))['error']['name'], 'BLOCKED')

+        await self.stream_update(worse_content_id, channel_name='@bad_channel', tags=['bad-stuff'])
+        self.assertEqual((await self.resolve('lbry://@bad_channel'))['error']['name'], 'BLOCKED')
+        self.assertEqual((await self.resolve('lbry://worse_content'))['error']['name'], 'BLOCKED')
+        self.assertEqual((await self.resolve('lbry://@bad_channel/worse_content'))['error']['name'], 'BLOCKED')
+
     async def test_publish_updates_file_list(self):
         tx = await self.stream_create(title='created')
         txo = tx['outputs'][0]
@@ -2177,7 +2192,7 @@ class SupportCommands(CommandTestCase):
         tip = await self.out(
             self.daemon.jsonrpc_support_create(
                 claim_id, '1.0', True, account_id=account2.id, wallet_id='wallet2',
-                funding_account_ids=[account2.id])
+                funding_account_ids=[account2.id], blocking=True)
         )
         await self.confirm_tx(tip['txid'])
@@ -2209,7 +2224,7 @@ class SupportCommands(CommandTestCase):
         support = await self.out(
             self.daemon.jsonrpc_support_create(
                 claim_id, '2.0', False, account_id=account2.id, wallet_id='wallet2',
-                funding_account_ids=[account2.id])
+                funding_account_ids=[account2.id], blocking=True)
         )
         await self.confirm_tx(support['txid'])

View file

@@ -1,9 +1,10 @@
+import unittest
 from unittest import skipIf
 import asyncio
 import os
 from binascii import hexlify

-from lbry.schema import Claim
+from scribe.schema import Claim
 from lbry.stream.background_downloader import BackgroundDownloader
 from lbry.stream.descriptor import StreamDescriptor
 from lbry.testcase import CommandTestCase
@@ -36,8 +37,7 @@ class FileCommands(CommandTestCase):
             tx_to_update.outputs[0], claim, 1, address, [self.account], self.account
         )
         await tx.sign([self.account])
-        await self.broadcast(tx)
-        await self.confirm_tx(tx.id)
+        await self.broadcast_and_confirm(tx)
         self.client_session = self.daemon.file_manager.source_managers['torrent'].torrent_session
         self.client_session._session.add_dht_node(('localhost', 4040))
         self.client_session.wait_start = False  # fixme: this is super slow on tests
@@ -216,6 +216,7 @@ class FileCommands(CommandTestCase):
             await self.wait_files_to_complete()
         self.assertNotEqual(first_path, second_path)

+    @unittest.SkipTest  # FIXME: claimname/updateclaim is gone. #3480 wip, unblock #3479"
     async def test_file_list_updated_metadata_on_resolve(self):
         await self.stream_create('foo', '0.01')
         txo = (await self.daemon.resolve(self.wallet.accounts, ['lbry://foo']))['lbry://foo']
@@ -504,8 +505,7 @@ class FileCommands(CommandTestCase):
         tx.outputs[0].claim.stream.fee.address_bytes = b''
         tx.outputs[0].script.generate()
         await tx.sign([self.account])
-        await self.broadcast(tx)
-        await self.confirm_tx(tx.id)
+        await self.broadcast_and_confirm(tx)

     async def __raw_value_update_no_fee_amount(self, tx, claim_address):
         tx = await self.daemon.jsonrpc_stream_update(
@@ -515,8 +515,7 @@ class FileCommands(CommandTestCase):
         tx.outputs[0].claim.stream.fee.message.ClearField('amount')
         tx.outputs[0].script.generate()
         await tx.sign([self.account])
-        await self.broadcast(tx)
-        await self.confirm_tx(tx.id)
+        await self.broadcast_and_confirm(tx)

 class DiskSpaceManagement(CommandTestCase):

View file

@ -80,7 +80,7 @@ class EpicAdventuresOfChris45(CommandTestCase):
# After some soul searching Chris decides that his story needs more # After some soul searching Chris decides that his story needs more
# heart and a better ending. He takes down the story and begins the rewrite. # heart and a better ending. He takes down the story and begins the rewrite.
-        abandon = await self.out(self.daemon.jsonrpc_stream_abandon(claim_id, blocking=False))
+        abandon = await self.out(self.daemon.jsonrpc_stream_abandon(claim_id, blocking=True))
         self.assertEqual(abandon['inputs'][0]['claim_id'], claim_id)
         await self.confirm_tx(abandon['txid'])
@@ -103,7 +103,7 @@ class EpicAdventuresOfChris45(CommandTestCase):
         # 1 LBC to which Chris readily obliges
         ramsey_account_id = (await self.out(self.daemon.jsonrpc_account_create("Ramsey")))['id']
         ramsey_address = await self.daemon.jsonrpc_address_unused(ramsey_account_id)
-        result = await self.out(self.daemon.jsonrpc_account_send('1.0', ramsey_address))
+        result = await self.out(self.daemon.jsonrpc_account_send('1.0', ramsey_address, blocking=True))
         self.assertIn("txid", result)
         await self.confirm_tx(result['txid'])
@@ -133,7 +133,7 @@ class EpicAdventuresOfChris45(CommandTestCase):
         # And voila, and bravo and encore! His Best Friend Ramsey read the story and immediately knew this was a hit
         # Now to keep this claim winning on the lbry blockchain he immediately supports the claim
         tx = await self.out(self.daemon.jsonrpc_support_create(
-            claim_id2, '0.2', account_id=ramsey_account_id
+            claim_id2, '0.2', account_id=ramsey_account_id, blocking=True
         ))
         await self.confirm_tx(tx['txid'])
@@ -147,7 +147,7 @@ class EpicAdventuresOfChris45(CommandTestCase):
         # Now he also wanted to support the original creator of the Award Winning Novel
         # So he quickly decides to send a tip to him
         tx = await self.out(
-            self.daemon.jsonrpc_support_create(claim_id2, '0.3', tip=True, account_id=ramsey_account_id)
+            self.daemon.jsonrpc_support_create(claim_id2, '0.3', tip=True, account_id=ramsey_account_id, blocking=True)
         )
         await self.confirm_tx(tx['txid'])
@@ -158,7 +158,7 @@ class EpicAdventuresOfChris45(CommandTestCase):
         await self.generate(5)
         # Seeing the ravishing success of his novel Chris adds support to his claim too
-        tx = await self.out(self.daemon.jsonrpc_support_create(claim_id2, '0.4'))
+        tx = await self.out(self.daemon.jsonrpc_support_create(claim_id2, '0.4', blocking=True))
         await self.confirm_tx(tx['txid'])
         # And check if his support showed up
@@ -183,7 +183,7 @@ class EpicAdventuresOfChris45(CommandTestCase):
         # But sadly Ramsey wasn't so pleased. It was hard for him to tell Chris...
         # Chris, though a bit heartbroken, abandoned the claim for now, but instantly started working on new hit lyrics
-        abandon = await self.out(self.daemon.jsonrpc_stream_abandon(txid=tx['txid'], nout=0, blocking=False))
+        abandon = await self.out(self.daemon.jsonrpc_stream_abandon(txid=tx['txid'], nout=0, blocking=True))
         self.assertTrue(abandon['inputs'][0]['txid'], tx['txid'])
         await self.confirm_tx(abandon['txid'])


@@ -1,13 +1,14 @@
 import asyncio
 import json
 import hashlib
+import sys
 from bisect import bisect_right
 from binascii import hexlify, unhexlify
 from collections import defaultdict
 from typing import NamedTuple, List
 from lbry.testcase import CommandTestCase
 from lbry.wallet.transaction import Transaction, Output
-from lbry.schema.compat import OldClaimMessage
+from scribe.schema.compat import OldClaimMessage
 from lbry.crypto.hash import sha256
 from lbry.crypto.base58 import Base58
@@ -23,7 +24,7 @@ class BaseResolveTestCase(CommandTestCase):
     def assertMatchESClaim(self, claim_from_es, claim_from_db):
         self.assertEqual(claim_from_es['claim_hash'][::-1].hex(), claim_from_db.claim_hash.hex())
         self.assertEqual(claim_from_es['claim_id'], claim_from_db.claim_hash.hex())
-        self.assertEqual(claim_from_es['activation_height'], claim_from_db.activation_height)
+        self.assertEqual(claim_from_es['activation_height'], claim_from_db.activation_height, f"es height: {claim_from_es['activation_height']}, rocksdb height: {claim_from_db.activation_height}")
         self.assertEqual(claim_from_es['last_take_over_height'], claim_from_db.last_takeover_height)
         self.assertEqual(claim_from_es['tx_id'], claim_from_db.tx_hash[::-1].hex())
         self.assertEqual(claim_from_es['tx_nout'], claim_from_db.position)
@@ -31,125 +32,151 @@ class BaseResolveTestCase(CommandTestCase):
         self.assertEqual(claim_from_es['effective_amount'], claim_from_db.effective_amount)

     def assertMatchDBClaim(self, expected, claim):
-        self.assertEqual(expected['claimId'], claim.claim_hash.hex())
-        self.assertEqual(expected['validAtHeight'], claim.activation_height)
-        self.assertEqual(expected['lastTakeoverHeight'], claim.last_takeover_height)
-        self.assertEqual(expected['txId'], claim.tx_hash[::-1].hex())
+        self.assertEqual(expected['claimid'], claim.claim_hash.hex())
+        self.assertEqual(expected['validatheight'], claim.activation_height)
+        self.assertEqual(expected['lasttakeoverheight'], claim.last_takeover_height)
+        self.assertEqual(expected['txid'], claim.tx_hash[::-1].hex())
         self.assertEqual(expected['n'], claim.position)
         self.assertEqual(expected['amount'], claim.amount)
-        self.assertEqual(expected['effectiveAmount'], claim.effective_amount)
+        self.assertEqual(expected['effectiveamount'], claim.effective_amount)

     async def assertResolvesToClaimId(self, name, claim_id):
         other = await self.resolve(name)
         if claim_id is None:
             self.assertIn('error', other)
             self.assertEqual(other['error']['name'], 'NOT_FOUND')
-            claims_from_es = (await self.conductor.spv_node.server.bp.db.search_index.search(name=name))[0]
+            claims_from_es = (await self.conductor.spv_node.server.session_manager.search_index.search(name=name))[0]
             claims_from_es = [c['claim_hash'][::-1].hex() for c in claims_from_es]
             self.assertNotIn(claim_id, claims_from_es)
         else:
-            claim_from_es = await self.conductor.spv_node.server.bp.db.search_index.search(claim_id=claim_id)
+            claim_from_es = await self.conductor.spv_node.server.session_manager.search_index.search(claim_id=claim_id)
             self.assertEqual(claim_id, other['claim_id'])
             self.assertEqual(claim_id, claim_from_es[0][0]['claim_hash'][::-1].hex())

     async def assertNoClaimForName(self, name: str):
-        lbrycrd_winning = json.loads(await self.blockchain._cli_cmnd('getvalueforname', name))
-        stream, channel, _, _ = await self.conductor.spv_node.server.bp.db.resolve(name)
-        self.assertNotIn('claimId', lbrycrd_winning)
+        lbrycrd_winning = json.loads(await self.blockchain._cli_cmnd('getclaimsforname', name))
+        stream, channel, _, _ = await self.conductor.spv_node.server.db.resolve(name)
+        if 'claims' in lbrycrd_winning and lbrycrd_winning['claims'] is not None:
+            self.assertEqual(len(lbrycrd_winning['claims']), 0)
         if stream is not None:
             self.assertIsInstance(stream, LookupError)
         else:
             self.assertIsInstance(channel, LookupError)
-        claim_from_es = await self.conductor.spv_node.server.bp.db.search_index.search(name=name)
+        claim_from_es = await self.conductor.spv_node.server.session_manager.search_index.search(name=name)
         self.assertListEqual([], claim_from_es[0])

-    async def assertNoClaim(self, claim_id: str):
-        self.assertDictEqual(
-            {}, json.loads(await self.blockchain._cli_cmnd('getclaimbyid', claim_id))
-        )
-        claim_from_es = await self.conductor.spv_node.server.bp.db.search_index.search(claim_id=claim_id)
+    async def assertNoClaim(self, name: str, claim_id: str):
+        expected = json.loads(await self.blockchain._cli_cmnd('getclaimsfornamebyid', name, '["' + claim_id + '"]'))
+        if 'claims' in expected and expected['claims'] is not None:
+            # ensure that if we do have the matching claim that it is not active
+            self.assertEqual(expected['claims'][0]['effectiveamount'], 0)
+        claim_from_es = await self.conductor.spv_node.server.session_manager.search_index.search(claim_id=claim_id)
         self.assertListEqual([], claim_from_es[0])
-        claim = await self.conductor.spv_node.server.bp.db.fs_getclaimbyid(claim_id)
+        claim = await self.conductor.spv_node.server.db.fs_getclaimbyid(claim_id)
         self.assertIsNone(claim)
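Note: the per-claim lbrycrd lookups (getclaimbyid, getvalueforname) are replaced with lbcd's name-scoped queries, which is why these helpers now take the claim name as well. A hedged sketch of the response shape the assertions above rely on, inferred from the lower-cased field names used in assertMatchDBClaim:

    result = json.loads(await self.blockchain._cli_cmnd(
        'getclaimsfornamebyid', name, '["' + claim_id + '"]'))
    # assumed shape:
    # {"lasttakeoverheight": 207,
    #  "claims": [{"claimid": "...", "txid": "...", "n": 0, "amount": ...,
    #              "effectiveamount": ..., "validatheight": ...}]}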

     async def assertMatchWinningClaim(self, name):
-        expected = json.loads(await self.blockchain._cli_cmnd('getvalueforname', name))
-        stream, channel, _, _ = await self.conductor.spv_node.server.bp.db.resolve(name)
+        expected = json.loads(await self.blockchain._cli_cmnd('getclaimsfornamebybid', name, "[0]"))
+        stream, channel, _, _ = await self.conductor.spv_node.server.db.resolve(name)
         claim = stream if stream else channel
-        await self._assertMatchClaim(expected, claim)
+        expected['claims'][0]['lasttakeoverheight'] = expected['lasttakeoverheight']
+        await self._assertMatchClaim(expected['claims'][0], claim)
         return claim

     async def _assertMatchClaim(self, expected, claim):
         self.assertMatchDBClaim(expected, claim)
-        claim_from_es = await self.conductor.spv_node.server.bp.db.search_index.search(
+        claim_from_es = await self.conductor.spv_node.server.session_manager.search_index.search(
             claim_id=claim.claim_hash.hex()
         )
         self.assertEqual(len(claim_from_es[0]), 1)
         self.assertMatchESClaim(claim_from_es[0][0], claim)
-        self._check_supports(claim.claim_hash.hex(), expected['supports'], claim_from_es[0][0]['support_amount'])
+        self._check_supports(claim.claim_hash.hex(), expected.get('supports', []),
+                             claim_from_es[0][0]['support_amount'])

-    async def assertMatchClaim(self, claim_id, is_active_in_lbrycrd=True):
-        expected = json.loads(await self.blockchain._cli_cmnd('getclaimbyid', claim_id))
-        claim = await self.conductor.spv_node.server.bp.db.fs_getclaimbyid(claim_id)
-        if is_active_in_lbrycrd:
-            if not expected:
-                self.assertIsNone(claim)
-                return
-            self.assertMatchDBClaim(expected, claim)
-        else:
-            self.assertDictEqual({}, expected)
-        claim_from_es = await self.conductor.spv_node.server.bp.db.search_index.search(
+    async def assertMatchClaim(self, name, claim_id, is_active_in_lbrycrd=True):
+        claim = await self.conductor.spv_node.server.db.fs_getclaimbyid(claim_id)
+        claim_from_es = await self.conductor.spv_node.server.session_manager.search_index.search(
             claim_id=claim.claim_hash.hex()
         )
         self.assertEqual(len(claim_from_es[0]), 1)
         self.assertEqual(claim_from_es[0][0]['claim_hash'][::-1].hex(), claim.claim_hash.hex())
         self.assertMatchESClaim(claim_from_es[0][0], claim)
-        self._check_supports(
-            claim.claim_hash.hex(), expected.get('supports', []), claim_from_es[0][0]['support_amount'],
-            is_active_in_lbrycrd
-        )
+        expected = json.loads(await self.blockchain._cli_cmnd('getclaimsfornamebyid', name, '["' + claim_id + '"]'))
+        if is_active_in_lbrycrd:
+            if not expected:
+                self.assertIsNone(claim)
+                return
+            expected['claims'][0]['lasttakeoverheight'] = expected['lasttakeoverheight']
+            self.assertMatchDBClaim(expected['claims'][0], claim)
+            self._check_supports(claim.claim_hash.hex(), expected['claims'][0].get('supports', []),
+                                 claim_from_es[0][0]['support_amount'])
+        else:
+            if 'claims' in expected and expected['claims'] is not None:
+                # ensure that if we do have the matching claim that it is not active
+                self.assertEqual(expected['claims'][0]['effectiveamount'], 0)
         return claim

     async def assertMatchClaimIsWinning(self, name, claim_id):
         self.assertEqual(claim_id, (await self.assertMatchWinningClaim(name)).claim_hash.hex())
         await self.assertMatchClaimsForName(name)

-    def _check_supports(self, claim_id, lbrycrd_supports, es_support_amount, is_active_in_lbrycrd=True):
-        total_amount = 0
-        db = self.conductor.spv_node.server.bp.db
-        for i, (tx_num, position, amount) in enumerate(db.get_supports(bytes.fromhex(claim_id))):
-            total_amount += amount
-            if is_active_in_lbrycrd:
-                support = lbrycrd_supports[i]
-                self.assertEqual(support['txId'], db.prefix_db.tx_hash.get(tx_num, deserialize_value=False)[::-1].hex())
-                self.assertEqual(support['n'], position)
-                self.assertEqual(support['height'], bisect_right(db.tx_counts, tx_num))
-                self.assertEqual(support['validAtHeight'], db.get_activation(tx_num, position, is_support=True))
-        self.assertEqual(total_amount, es_support_amount, f"lbrycrd support amount: {total_amount} vs es: {es_support_amount}")
+    def _check_supports(self, claim_id, lbrycrd_supports, es_support_amount):
+        total_lbrycrd_amount = 0.0
+        total_es_amount = 0.0
+        active_es_amount = 0.0
+        db = self.conductor.spv_node.server.db
+        es_supports = db.get_supports(bytes.fromhex(claim_id))
+        # we're only concerned about active supports here, and they should match
+        self.assertTrue(len(es_supports) >= len(lbrycrd_supports))
+        for i, (tx_num, position, amount) in enumerate(es_supports):
+            total_es_amount += amount
+            valid_height = db.get_activation(tx_num, position, is_support=True)
+            if valid_height > db.db_height:
+                continue
+            active_es_amount += amount
+            txid = db.prefix_db.tx_hash.get(tx_num, deserialize_value=False)[::-1].hex()
+            support = next(filter(lambda s: s['txid'] == txid and s['n'] == position, lbrycrd_supports))
+            total_lbrycrd_amount += support['amount']
+            self.assertEqual(support['height'], bisect_right(db.tx_counts, tx_num))
+            self.assertEqual(support['validatheight'], valid_height)
+        self.assertEqual(total_es_amount, es_support_amount)
+        self.assertEqual(active_es_amount, total_lbrycrd_amount)

     async def assertMatchClaimsForName(self, name):
-        expected = json.loads(await self.blockchain._cli_cmnd('getclaimsforname', name))
-        db = self.conductor.spv_node.server.bp.db
-        # self.assertEqual(len(expected['claims']), len(db_claims.claims))
-        # self.assertEqual(expected['lastTakeoverHeight'], db_claims.lastTakeoverHeight)
-        last_takeover = json.loads(await self.blockchain._cli_cmnd('getvalueforname', name))['lastTakeoverHeight']
+        expected = json.loads(await self.blockchain._cli_cmnd('getclaimsforname', name, "", "true"))
+        db = self.conductor.spv_node.server.db
         for c in expected['claims']:
-            c['lastTakeoverHeight'] = last_takeover
-            claim_id = c['claimId']
+            c['lasttakeoverheight'] = expected['lasttakeoverheight']
+            claim_id = c['claimid']
             claim_hash = bytes.fromhex(claim_id)
             claim = db._fs_get_claim_by_hash(claim_hash)
             self.assertMatchDBClaim(c, claim)
-            claim_from_es = await self.conductor.spv_node.server.bp.db.search_index.search(
-                claim_id=c['claimId']
+            claim_from_es = await self.conductor.spv_node.server.session_manager.search_index.search(
+                claim_id=claim_id
             )
             self.assertEqual(len(claim_from_es[0]), 1)
-            self.assertEqual(claim_from_es[0][0]['claim_hash'][::-1].hex(), c['claimId'])
+            self.assertEqual(claim_from_es[0][0]['claim_hash'][::-1].hex(), claim_id)
             self.assertMatchESClaim(claim_from_es[0][0], claim)
-            self._check_supports(c['claimId'], c['supports'], claim_from_es[0][0]['support_amount'])
+            self._check_supports(claim_id, c.get('supports', []),
+                                 claim_from_es[0][0]['support_amount'])
+
+    async def assertNameState(self, height: int, name: str, winning_claim_id: str, last_takeover_height: int,
+                              non_winning_claims: List[ClaimStateValue]):
+        self.assertEqual(height, self.conductor.spv_node.server.db.db_height)
+        await self.assertMatchClaimIsWinning(name, winning_claim_id)
+        for non_winning in non_winning_claims:
+            claim = await self.assertMatchClaim(
+                name, non_winning.claim_id, is_active_in_lbrycrd=non_winning.active_in_lbrycrd
+            )
+            self.assertEqual(non_winning.activation_height, claim.activation_height)
+            self.assertEqual(last_takeover_height, claim.last_takeover_height)
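The new assertNameState helper bundles the winning-claim check and the non-winning-claim checks into one call; later hunks in this diff invoke it like this:

    await self.assertNameState(539, name, second_claim_id, last_takeover_height=539, non_winning_claims=[
        ClaimStateValue(first_claim_id, activation_height=207, active_in_lbrycrd=True)
    ])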

 class ResolveCommand(BaseResolveTestCase):
@@ -261,19 +288,20 @@ class ResolveCommand(BaseResolveTestCase):
         tx_details = await self.blockchain.get_raw_transaction(claim['txid'])
         self.assertEqual(claim['confirmations'], json.loads(tx_details)['confirmations'])

+        # FIXME : claimname/updateclaim is gone. #3480 wip, unblock #3479"
         # resolve handles invalid data
-        await self.blockchain_claim_name("gibberish", hexlify(b"{'invalid':'json'}").decode(), "0.1")
-        await self.generate(1)
-        response = await self.out(self.daemon.jsonrpc_resolve("lbry://gibberish"))
-        self.assertSetEqual({'lbry://gibberish'}, set(response))
-        claim = response['lbry://gibberish']
-        self.assertEqual(claim['name'], 'gibberish')
-        self.assertNotIn('value', claim)
+        # await self.blockchain_claim_name("gibberish", hexlify(b"{'invalid':'json'}").decode(), "0.1")
+        # await self.generate(1)
+        # response = await self.out(self.daemon.jsonrpc_resolve("lbry://gibberish"))
+        # self.assertSetEqual({'lbry://gibberish'}, set(response))
+        # claim = response['lbry://gibberish']
+        # self.assertEqual(claim['name'], 'gibberish')
+        # self.assertNotIn('value', claim)

         # resolve retries
         await self.conductor.spv_node.stop()
         resolve_task = asyncio.create_task(self.resolve('foo'))
-        await self.conductor.spv_node.start(self.conductor.blockchain_node)
+        await self.conductor.spv_node.start(self.conductor.lbcwallet_node)
         self.assertIsNotNone((await resolve_task)['claim_id'])

     async def test_winning_by_effective_amount(self):
@@ -443,16 +471,16 @@ class ResolveCommand(BaseResolveTestCase):
         self.assertEqual(one, claim6['name'])

     async def test_resolve_old_claim(self):
-        channel = await self.daemon.jsonrpc_channel_create('@olds', '1.0')
+        channel = await self.daemon.jsonrpc_channel_create('@olds', '1.0', blocking=True)
         await self.confirm_tx(channel.id)
         address = channel.outputs[0].get_address(self.account.ledger)
         claim = generate_signed_legacy(address, channel.outputs[0])
         tx = await Transaction.claim_create('example', claim.SerializeToString(), 1, address, [self.account], self.account)
         await tx.sign([self.account])
-        await self.broadcast(tx)
-        await self.confirm_tx(tx.id)
+        await self.broadcast_and_confirm(tx)

         response = await self.resolve('@olds/example')
+        self.assertTrue('is_channel_signature_valid' in response, str(response))
         self.assertTrue(response['is_channel_signature_valid'])

         claim.publisherSignature.signature = bytes(reversed(claim.publisherSignature.signature))
@@ -460,8 +488,7 @@ class ResolveCommand(BaseResolveTestCase):
             'bad_example', claim.SerializeToString(), 1, address, [self.account], self.account
         )
         await tx.sign([self.account])
-        await self.broadcast(tx)
-        await self.confirm_tx(tx.id)
+        await self.broadcast_and_confirm(tx)
         response = await self.resolve('bad_example')
         self.assertFalse(response['is_channel_signature_valid'])
@@ -606,6 +633,12 @@ class ResolveClaimTakeovers(BaseResolveTestCase):
         self.assertDictEqual(await self.resolve('@other/signed4'),
                              await self.resolve('signed4'))

+        self.assertEqual(2, len(await self.claim_search(channel_ids=[channel_id2])))
+        await self.channel_update(channel_id2)
+        await make_claim('third_signed', '0.01', channel_id=channel_id2)
+        self.assertEqual(3, len(await self.claim_search(channel_ids=[channel_id2])))
+
     async def _test_activation_delay(self):
         name = 'derp'
         # initially claim the name
@@ -643,10 +676,10 @@ class ResolveClaimTakeovers(BaseResolveTestCase):
     async def assertNameState(self, height: int, name: str, winning_claim_id: str, last_takeover_height: int,
                               non_winning_claims: List[ClaimStateValue]):
-        self.assertEqual(height, self.conductor.spv_node.server.bp.db.db_height)
+        self.assertEqual(height, self.conductor.spv_node.server.db.db_height)
         await self.assertMatchClaimIsWinning(name, winning_claim_id)
         for non_winning in non_winning_claims:
-            claim = await self.assertMatchClaim(
+            claim = await self.assertMatchClaim(name,
                 non_winning.claim_id, is_active_in_lbrycrd=non_winning.active_in_lbrycrd
             )
             self.assertEqual(non_winning.activation_height, claim.activation_height)
@@ -961,7 +994,7 @@ class ResolveClaimTakeovers(BaseResolveTestCase):
         )
         greater_than_or_equal_to_zero = [
             claim['claim_id'] for claim in (
-                await self.conductor.spv_node.server.bp.db.search_index.search(
+                await self.conductor.spv_node.server.session_manager.search_index.search(
                     channel_id=channel_id, fee_amount=">=0"
                 ))[0]
         ]
@@ -969,7 +1002,7 @@ class ResolveClaimTakeovers(BaseResolveTestCase):
         self.assertSetEqual(set(greater_than_or_equal_to_zero), {stream_with_no_fee, stream_with_fee})
         greater_than_zero = [
             claim['claim_id'] for claim in (
-                await self.conductor.spv_node.server.bp.db.search_index.search(
+                await self.conductor.spv_node.server.session_manager.search_index.search(
                     channel_id=channel_id, fee_amount=">0"
                 ))[0]
         ]
@@ -977,7 +1010,7 @@ class ResolveClaimTakeovers(BaseResolveTestCase):
         self.assertSetEqual(set(greater_than_zero), {stream_with_fee})
         equal_to_zero = [
             claim['claim_id'] for claim in (
-                await self.conductor.spv_node.server.bp.db.search_index.search(
+                await self.conductor.spv_node.server.session_manager.search_index.search(
                     channel_id=channel_id, fee_amount="<=0"
                 ))[0]
         ]
@@ -992,10 +1025,10 @@ class ResolveClaimTakeovers(BaseResolveTestCase):
         name = 'test'
         await self.generate(494)
         address = (await self.account.receiving.get_addresses(True))[0]
-        await self.blockchain.send_to_address(address, 400.0)
+        await self.send_to_address_and_wait(address, 400.0)
         await self.account.ledger.on_address.first
         await self.generate(100)
-        self.assertEqual(800, self.conductor.spv_node.server.bp.db.db_height)
+        self.assertEqual(800, self.conductor.spv_node.server.db.db_height)

         # Block 801: Claim A for 10 LBC is accepted.
         # It is the first claim, so it immediately becomes active and controlling.
@@ -1007,10 +1040,10 @@ class ResolveClaimTakeovers(BaseResolveTestCase):
         # Its activation height is 1121 + min(4032, floor((1121-801) / 32)) = 1121 + 10 = 1131.
         # State: A(10) is controlling, B(20) is accepted.
         await self.generate(32 * 10 - 1)
-        self.assertEqual(1120, self.conductor.spv_node.server.bp.db.db_height)
+        self.assertEqual(1120, self.conductor.spv_node.server.db.db_height)
         claim_id_B = (await self.stream_create(name, '20.0', allow_duplicate_name=True))['outputs'][0]['claim_id']
-        claim_B, _, _, _ = await self.conductor.spv_node.server.bp.db.resolve(f"{name}:{claim_id_B}")
-        self.assertEqual(1121, self.conductor.spv_node.server.bp.db.db_height)
+        claim_B, _, _, _ = await self.conductor.spv_node.server.db.resolve(f"{name}:{claim_id_B}")
+        self.assertEqual(1121, self.conductor.spv_node.server.db.db_height)
         self.assertEqual(1131, claim_B.activation_height)
         await self.assertMatchClaimIsWinning(name, claim_id_A)
@@ -1018,33 +1051,33 @@ class ResolveClaimTakeovers(BaseResolveTestCase):
         # Since it is a support for the controlling claim, it activates immediately.
         # State: A(10+14) is controlling, B(20) is accepted.
         await self.support_create(claim_id_A, bid='14.0')
-        self.assertEqual(1122, self.conductor.spv_node.server.bp.db.db_height)
+        self.assertEqual(1122, self.conductor.spv_node.server.db.db_height)
         await self.assertMatchClaimIsWinning(name, claim_id_A)

         # Block 1123: Claim C for 50 LBC is accepted.
         # The activation height is 1123 + min(4032, floor((1123-801) / 32)) = 1123 + 10 = 1133.
         # State: A(10+14) is controlling, B(20) is accepted, C(50) is accepted.
         claim_id_C = (await self.stream_create(name, '50.0', allow_duplicate_name=True))['outputs'][0]['claim_id']
-        self.assertEqual(1123, self.conductor.spv_node.server.bp.db.db_height)
-        claim_C, _, _, _ = await self.conductor.spv_node.server.bp.db.resolve(f"{name}:{claim_id_C}")
+        self.assertEqual(1123, self.conductor.spv_node.server.db.db_height)
+        claim_C, _, _, _ = await self.conductor.spv_node.server.db.resolve(f"{name}:{claim_id_C}")
         self.assertEqual(1133, claim_C.activation_height)
         await self.assertMatchClaimIsWinning(name, claim_id_A)

         await self.generate(7)
-        self.assertEqual(1130, self.conductor.spv_node.server.bp.db.db_height)
+        self.assertEqual(1130, self.conductor.spv_node.server.db.db_height)
         await self.assertMatchClaimIsWinning(name, claim_id_A)
         await self.generate(1)

         # Block 1131: Claim B activates. It has 20 LBC, while claim A has 24 LBC (10 original + 14 from support X). There is no takeover, and claim A remains controlling.
         # State: A(10+14) is controlling, B(20) is active, C(50) is accepted.
-        self.assertEqual(1131, self.conductor.spv_node.server.bp.db.db_height)
+        self.assertEqual(1131, self.conductor.spv_node.server.db.db_height)
         await self.assertMatchClaimIsWinning(name, claim_id_A)

         # Block 1132: Claim D for 300 LBC is accepted. The activation height is 1132 + min(4032, floor((1132-801) / 32)) = 1132 + 10 = 1142.
         # State: A(10+14) is controlling, B(20) is active, C(50) is accepted, D(300) is accepted.
         claim_id_D = (await self.stream_create(name, '300.0', allow_duplicate_name=True))['outputs'][0]['claim_id']
-        self.assertEqual(1132, self.conductor.spv_node.server.bp.db.db_height)
-        claim_D, _, _, _ = await self.conductor.spv_node.server.bp.db.resolve(f"{name}:{claim_id_D}")
+        self.assertEqual(1132, self.conductor.spv_node.server.db.db_height)
+        claim_D, _, _, _ = await self.conductor.spv_node.server.db.resolve(f"{name}:{claim_id_D}")
         self.assertEqual(False, claim_D.is_controlling)
         self.assertEqual(801, claim_D.last_takeover_height)
         self.assertEqual(1142, claim_D.activation_height)
@@ -1053,8 +1086,8 @@ class ResolveClaimTakeovers(BaseResolveTestCase):
         # Block 1133: Claim C activates. It has 50 LBC, while claim A has 24 LBC, so a takeover is initiated. The takeover height for this name is set to 1133, and therefore the activation delay for all the claims becomes min(4032, floor((1133-1133) / 32)) = 0. All the claims become active. The totals for each claim are recalculated, and claim D becomes controlling because it has the highest total.
         # State: A(10+14) is active, B(20) is active, C(50) is active, D(300) is controlling
         await self.generate(1)
-        self.assertEqual(1133, self.conductor.spv_node.server.bp.db.db_height)
-        claim_D, _, _, _ = await self.conductor.spv_node.server.bp.db.resolve(f"{name}:{claim_id_D}")
+        self.assertEqual(1133, self.conductor.spv_node.server.db.db_height)
+        claim_D, _, _, _ = await self.conductor.spv_node.server.db.resolve(f"{name}:{claim_id_D}")
         self.assertEqual(True, claim_D.is_controlling)
         self.assertEqual(1133, claim_D.last_takeover_height)
         self.assertEqual(1133, claim_D.activation_height)
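The block comments in the hunks above all apply lbrycrd's activation-delay rule. As a worked check of the arithmetic (the helper name is mine, not from the repo):

    def activation_delay(height: int, last_takeover_height: int) -> int:
        # one block of delay per 32 blocks since the last takeover, capped at 4032
        return min(4032, (height - last_takeover_height) // 32)

    assert 1121 + activation_delay(1121, 801) == 1131   # claim B
    assert 1123 + activation_delay(1123, 801) == 1133   # claim C
    assert 1132 + activation_delay(1132, 801) == 1142   # claim D
    assert 1133 + activation_delay(1133, 1133) == 1133  # delay resets to 0 after the takeover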
@@ -1327,15 +1360,15 @@ class ResolveClaimTakeovers(BaseResolveTestCase):
         await self.generate(8)
         await self.assertMatchClaimIsWinning(name, first_claim_id)
         # abandon the support that causes the winning claim to have the highest staked
-        tx = await self.daemon.jsonrpc_txo_spend(type='support', txid=controlling_support_tx.id)
+        tx = await self.daemon.jsonrpc_txo_spend(type='support', txid=controlling_support_tx.id, blocking=True)
         await self.generate(1)
-        await self.assertMatchClaimIsWinning(name, first_claim_id)
-        # await self.assertMatchClaim(second_claim_id)
+        await self.assertNameState(538, name, first_claim_id, last_takeover_height=207, non_winning_claims=[
+            ClaimStateValue(second_claim_id, activation_height=539, active_in_lbrycrd=False)
+        ])
         await self.generate(1)
-
-        await self.assertMatchClaim(first_claim_id)
-        await self.assertMatchClaimIsWinning(name, second_claim_id)
+        await self.assertNameState(539, name, second_claim_id, last_takeover_height=539, non_winning_claims=[
+            ClaimStateValue(first_claim_id, activation_height=207, active_in_lbrycrd=True)
+        ])

     async def test_remove_controlling_support(self):
         name = 'derp'
@@ -1405,14 +1438,14 @@ class ResolveClaimTakeovers(BaseResolveTestCase):
         await self.generate(32)

         second_claim_id = (await self.stream_create(name, '0.01', allow_duplicate_name=True))['outputs'][0]['claim_id']
-        await self.assertNoClaim(second_claim_id)
+        await self.assertNoClaim(name, second_claim_id)
         self.assertEqual(
-            len((await self.conductor.spv_node.server.bp.db.search_index.search(claim_name=name))[0]), 1
+            len((await self.conductor.spv_node.server.session_manager.search_index.search(claim_name=name))[0]), 1
         )
         await self.generate(1)
-        await self.assertMatchClaim(second_claim_id)
+        await self.assertMatchClaim(name, second_claim_id)
         self.assertEqual(
-            len((await self.conductor.spv_node.server.bp.db.search_index.search(claim_name=name))[0]), 2
+            len((await self.conductor.spv_node.server.session_manager.search_index.search(claim_name=name))[0]), 2
         )

     async def test_abandon_controlling_same_block_as_new_claim(self):
@@ -1428,35 +1461,47 @@ class ResolveClaimTakeovers(BaseResolveTestCase):
     async def test_trending(self):
         async def get_trending_score(claim_id):
-            return (await self.conductor.spv_node.server.bp.db.search_index.search(
+            return (await self.conductor.spv_node.server.session_manager.search_index.search(
                 claim_id=claim_id
             ))[0][0]['trending_score']

         claim_id1 = (await self.stream_create('derp', '1.0'))['outputs'][0]['claim_id']
-        COIN = 1E8
+        COIN = int(1E8)

-        height = 99000
-        self.conductor.spv_node.server.bp._add_claim_activation_change_notification(
-            claim_id1, height, 0, 10 * COIN
+        self.assertEqual(self.conductor.spv_node.writer.height, 207)
+        self.conductor.spv_node.writer.db.prefix_db.trending_notification.stage_put(
+            (208, bytes.fromhex(claim_id1)), (0, 10 * COIN)
         )
         await self.generate(1)
-        self.assertEqual(172.64252836433135, await get_trending_score(claim_id1))
-        self.conductor.spv_node.server.bp._add_claim_activation_change_notification(
-            claim_id1, height + 1, 10 * COIN, 100 * COIN
+        self.assertEqual(self.conductor.spv_node.writer.height, 208)
+        self.assertEqual(1.7090807854206793, await get_trending_score(claim_id1))
+        self.conductor.spv_node.writer.db.prefix_db.trending_notification.stage_put(
+            (209, bytes.fromhex(claim_id1)), (10 * COIN, 100 * COIN)
         )
         await self.generate(1)
-        self.assertEqual(173.45931832928875, await get_trending_score(claim_id1))
-        self.conductor.spv_node.server.bp._add_claim_activation_change_notification(
-            claim_id1, height + 100, 100 * COIN, 1000000 * COIN
+        self.assertEqual(self.conductor.spv_node.writer.height, 209)
+        self.assertEqual(2.2437974397778886, await get_trending_score(claim_id1))
+        self.conductor.spv_node.writer.db.prefix_db.trending_notification.stage_put(
+            (309, bytes.fromhex(claim_id1)), (100 * COIN, 1000000 * COIN)
         )
-        await self.generate(1)
-        self.assertEqual(176.65517070393514, await get_trending_score(claim_id1))
-        self.conductor.spv_node.server.bp._add_claim_activation_change_notification(
-            claim_id1, height + 200, 1000000 * COIN, 1 * COIN
+        await self.generate(100)
+        self.assertEqual(self.conductor.spv_node.writer.height, 309)
+        self.assertEqual(5.157053472135866, await get_trending_score(claim_id1))
+        self.conductor.spv_node.writer.db.prefix_db.trending_notification.stage_put(
+            (409, bytes.fromhex(claim_id1)), (1000000 * COIN, 1 * COIN)
         )
+        await self.generate(99)
+        self.assertEqual(self.conductor.spv_node.writer.height, 408)
+        self.assertEqual(5.157053472135866, await get_trending_score(claim_id1))
         await self.generate(1)
-        self.assertEqual(-174.951347102643, await get_trending_score(claim_id1))
-        search_results = (await self.conductor.spv_node.server.bp.db.search_index.search(claim_name="derp"))[0]
+        self.assertEqual(self.conductor.spv_node.writer.height, 409)
+        self.assertEqual(-3.4256156592205627, await get_trending_score(claim_id1))
+        search_results = (await self.conductor.spv_node.server.session_manager.search_index.search(claim_name="derp"))[0]
         self.assertEqual(1, len(search_results))
         self.assertListEqual([claim_id1], [c['claim_id'] for c in search_results])
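Note: the old block-processor hook (_add_claim_activation_change_notification) is gone; the test now stages trending notifications directly into the writer's rocksdb prefix database. From the calls above, the trending_notification column appears to be keyed by (height, claim_hash) with a (previous_amount, new_amount) value, amounts in dewies:

    COIN = int(1E8)  # dewies per LBC
    writer = self.conductor.spv_node.writer
    writer.db.prefix_db.trending_notification.stage_put(
        (208, bytes.fromhex(claim_id1)),  # key: (height, claim_hash)
        (0, 10 * COIN),                   # value: (previous_amount, new_amount)
    )

The staged rows presumably get committed when the writer advances a block, which is why each stage_put is followed by generate().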
@@ -1465,22 +1510,31 @@ class ResolveAfterReorg(BaseResolveTestCase):
     async def reorg(self, start):
         blocks = self.ledger.headers.height - start
         self.blockchain.block_expected = start - 1
-        prepare = self.ledger.on_header.where(self.blockchain.is_expected_block)
+        self.conductor.spv_node.server.synchronized.clear()
         # go back to start
         await self.blockchain.invalidate_block((await self.ledger.headers.hash(start)).decode())
         # go to previous + 1
-        await self.generate(blocks + 2)
-        await prepare
+        await self.blockchain.generate(blocks + 2)
+        # no guarantee that it didn't happen already, so start waiting from before calling generate
+        await self.conductor.spv_node.server.synchronized.wait()
+        # await asyncio.wait_for(self.on_header(self.blockchain.block_expected), 30.0)

     async def assertBlockHash(self, height):
-        bp = self.conductor.spv_node.server.bp
+        reader_db = self.conductor.spv_node.server.db
         block_hash = await self.blockchain.get_block_hash(height)
         self.assertEqual(block_hash, (await self.ledger.headers.hash(height)).decode())
-        self.assertEqual(block_hash, (await bp.db.fs_block_hashes(height, 1))[0][::-1].hex())
+        self.assertEqual(block_hash, (await reader_db.fs_block_hashes(height, 1))[0][::-1].hex())
         txids = [
-            tx_hash[::-1].hex() for tx_hash in bp.db.get_block_txs(height)
+            tx_hash[::-1].hex() for tx_hash in reader_db.get_block_txs(height)
         ]
-        txs = await bp.db.get_transactions_and_merkles(txids)
-        block_txs = (await bp.daemon.deserialised_block(block_hash))['tx']
+        txs = await reader_db.get_transactions_and_merkles(txids)
+        block_txs = (await self.conductor.spv_node.server.daemon.deserialised_block(block_hash))['tx']
         self.assertSetEqual(set(block_txs), set(txs.keys()), msg='leveldb/lbrycrd is missing transactions')
         self.assertListEqual(block_txs, list(txs.keys()), msg='leveldb/lbrycrd transactions are of order')
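The reorg helper now uses a clear-then-wait pattern on what is presumably an asyncio.Event instead of subscribing to header notifications. Clearing before the blocks are generated means the sync signal cannot be missed even if the server catches up before wait() is reached:

    server = self.conductor.spv_node.server
    server.synchronized.clear()          # before triggering the reorg
    await self.blockchain.generate(n)    # the reorg happens here
    await server.synchronized.wait()     # set by the server once it has caught up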
@@ -1491,9 +1545,18 @@ class ResolveAfterReorg(BaseResolveTestCase):
         channel_id = self.get_claim_id(
             await self.channel_create(channel_name, '0.01')
         )
-        self.assertEqual(channel_id, (await self.assertMatchWinningClaim(channel_name)).claim_hash.hex())
+        await self.assertNameState(
+            height=207, name='@abc', winning_claim_id=channel_id, last_takeover_height=207,
+            non_winning_claims=[]
+        )
         await self.reorg(206)
-        self.assertEqual(channel_id, (await self.assertMatchWinningClaim(channel_name)).claim_hash.hex())
+        await self.assertNameState(
+            height=208, name='@abc', winning_claim_id=channel_id, last_takeover_height=207,
+            non_winning_claims=[]
+        )

         # await self.assertNoClaimForName(channel_name)
         # self.assertNotIn('error', await self.resolve(channel_name))
@@ -1502,16 +1565,29 @@ class ResolveAfterReorg(BaseResolveTestCase):
         stream_id = self.get_claim_id(
             await self.stream_create(stream_name, '0.01', channel_id=channel_id)
         )
-        self.assertEqual(stream_id, (await self.assertMatchWinningClaim(stream_name)).claim_hash.hex())
+        await self.assertNameState(
+            height=209, name=stream_name, winning_claim_id=stream_id, last_takeover_height=209,
+            non_winning_claims=[]
+        )
         await self.reorg(206)
-        self.assertEqual(stream_id, (await self.assertMatchWinningClaim(stream_name)).claim_hash.hex())
+        await self.assertNameState(
+            height=210, name=stream_name, winning_claim_id=stream_id, last_takeover_height=209,
+            non_winning_claims=[]
+        )

         await self.support_create(stream_id, '0.01')
-        self.assertNotIn('error', await self.resolve(stream_name))
-        self.assertEqual(stream_id, (await self.assertMatchWinningClaim(stream_name)).claim_hash.hex())
+        await self.assertNameState(
+            height=211, name=stream_name, winning_claim_id=stream_id, last_takeover_height=209,
+            non_winning_claims=[]
+        )
         await self.reorg(206)
         # self.assertNotIn('error', await self.resolve(stream_name))
-        self.assertEqual(stream_id, (await self.assertMatchWinningClaim(stream_name)).claim_hash.hex())
+        await self.assertNameState(
+            height=212, name=stream_name, winning_claim_id=stream_id, last_takeover_height=209,
+            non_winning_claims=[]
+        )

         await self.stream_abandon(stream_id)
         self.assertNotIn('error', await self.resolve(channel_name))
@@ -1553,7 +1629,6 @@ class ResolveAfterReorg(BaseResolveTestCase):
         await self.ledger.wait(broadcast_tx)
         await self.support_create(still_valid.outputs[0].claim_id, '0.01')
-        # await self.generate(1)
         await self.ledger.wait(broadcast_tx, self.blockchain.block_expected)
         self.assertEqual(self.ledger.headers.height, 208)
         await self.assertBlockHash(208)
@@ -1570,7 +1645,7 @@ class ResolveAfterReorg(BaseResolveTestCase):
         # reorg the last block dropping our claim tx
         await self.blockchain.invalidate_block(invalidated_block_hash)
-        await self.blockchain.clear_mempool()
+        await self.conductor.clear_mempool()
         await self.blockchain.generate(2)

         # wait for the client to catch up and verify the reorg
@@ -1603,7 +1678,7 @@ class ResolveAfterReorg(BaseResolveTestCase):
         await self.blockchain.generate(1)

         # wait for the client to catch up
-        await asyncio.wait_for(self.on_header(210), 1.0)
+        await asyncio.wait_for(self.on_header(210), 3.0)

         # verify the claim is in the new block and that it is returned by claim_search
         republished = await self.resolve('hovercraft')
@@ -1649,11 +1724,11 @@ class ResolveAfterReorg(BaseResolveTestCase):
         # reorg the last block dropping our claim tx
         await self.blockchain.invalidate_block(invalidated_block_hash)
-        await self.blockchain.clear_mempool()
+        await self.conductor.clear_mempool()
         await self.blockchain.generate(2)

         # wait for the client to catch up and verify the reorg
-        await asyncio.wait_for(self.on_header(209), 3.0)
+        await asyncio.wait_for(self.on_header(209), 30.0)
         await self.assertBlockHash(207)
         await self.assertBlockHash(208)
         await self.assertBlockHash(209)


@@ -3,7 +3,7 @@ import asyncio

 from lbry.testcase import IntegrationTestCase
 import lbry.wallet
-from lbry.schema.claim import Claim
+from scribe.schema.claim import Claim
 from lbry.wallet.transaction import Transaction, Output, Input
 from lbry.wallet.dewies import dewies_to_lbc as d2l, lbc_to_dewies as l2d
@@ -21,9 +21,8 @@ class BasicTransactionTest(IntegrationTestCase):
             [asyncio.ensure_future(self.on_address_update(address1)),
              asyncio.ensure_future(self.on_address_update(address2))]
         ))
-        sendtxid1 = await self.blockchain.send_to_address(address1, 5)
-        sendtxid2 = await self.blockchain.send_to_address(address2, 5)
-        await self.blockchain.generate(1)
+        await self.send_to_address_and_wait(address1, 5)
+        await self.send_to_address_and_wait(address2, 5, 1)
         await notifications
         self.assertEqual(d2l(await self.account.get_balance()), '10.0')
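Note: send_to_address_and_wait is the test helper this changeset leans on throughout. Judging by the call sites, it folds the old send / wait-for-mempool / generate sequence into one call, with an optional trailing argument for how many blocks to mine afterwards. A rough sketch, not the repo's exact implementation:

    async def send_to_address_and_wait(self, address, amount, blocks_to_generate=0):
        txid = await self.blockchain.send_to_address(address, amount)
        await self.on_transaction_id(txid)          # wait for the tx to reach the mempool
        if blocks_to_generate:
            await self.generate(blocks_to_generate)
            await self.on_transaction_id(txid)      # wait for confirmation
        return txid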
@@ -57,7 +56,7 @@ class BasicTransactionTest(IntegrationTestCase):
         notifications = asyncio.create_task(asyncio.wait(
             [asyncio.ensure_future(self.ledger.wait(channel_tx)), asyncio.ensure_future(self.ledger.wait(stream_tx))]
         ))
-        await self.blockchain.generate(1)
+        await self.generate(1)
         await notifications
         self.assertEqual(d2l(await self.account.get_balance()), '7.985786')
         self.assertEqual(d2l(await self.account.get_balance(include_claims=True)), '9.985786')
@@ -70,7 +69,7 @@ class BasicTransactionTest(IntegrationTestCase):
         await self.broadcast(abandon_tx)
         await notify
         notify = asyncio.create_task(self.ledger.wait(abandon_tx))
-        await self.blockchain.generate(1)
+        await self.generate(1)
         await notify

         response = await self.ledger.resolve([], ['lbry://@bar/foo'])


@@ -1,3 +1,5 @@
+import unittest
+
 from lbry.testcase import CommandTestCase
@@ -17,7 +19,7 @@ class TransactionCommandsTestCase(CommandTestCase):
     async def test_transaction_show(self):
         # local tx
         result = await self.out(self.daemon.jsonrpc_account_send(
-            '5.0', await self.daemon.jsonrpc_address_unused(self.account.id)
+            '5.0', await self.daemon.jsonrpc_address_unused(self.account.id), blocking=True
         ))
         await self.confirm_tx(result['txid'])
         tx = await self.daemon.jsonrpc_transaction_show(result['txid'])
@@ -38,10 +40,9 @@ class TransactionCommandsTestCase(CommandTestCase):
         self.assertFalse(result['success'])

     async def test_utxo_release(self):
-        sendtxid = await self.blockchain.send_to_address(
-            await self.account.receiving.get_or_create_usable_address(), 1
+        await self.send_to_address_and_wait(
+            await self.account.receiving.get_or_create_usable_address(), 1, 1
         )
-        await self.confirm_tx(sendtxid)
         await self.assertBalance(self.account, '11.0')
         await self.ledger.reserve_outputs(await self.account.get_utxos())
         await self.assertBalance(self.account, '0.0')
@@ -51,6 +52,7 @@ class TransactionCommandsTestCase(CommandTestCase):

 class TestSegwit(CommandTestCase):

+    @unittest.SkipTest
     async def test_segwit(self):
         p2sh_address1 = await self.blockchain.get_new_address(self.blockchain.P2SH_SEGWIT_ADDRESS)
         p2sh_address2 = await self.blockchain.get_new_address(self.blockchain.P2SH_SEGWIT_ADDRESS)
@@ -64,14 +66,13 @@ class TestSegwit(CommandTestCase):
         p2sh_txid2 = await self.blockchain.send_to_address(p2sh_address2, '1.0')
         bech32_txid1 = await self.blockchain.send_to_address(bech32_address1, '1.0')
         bech32_txid2 = await self.blockchain.send_to_address(bech32_address2, '1.0')
         await self.generate(1)

         # P2SH & BECH32 can pay to P2SH address
         tx = await self.blockchain.create_raw_transaction([
                 {"txid": p2sh_txid1, "vout": 0},
                 {"txid": bech32_txid1, "vout": 0},
-            ], [{p2sh_address3: '1.9'}]
+            ], {p2sh_address3: 1.9}
         )
         tx = await self.blockchain.sign_raw_transaction_with_wallet(tx)
         p2sh_txid3 = await self.blockchain.send_raw_transaction(tx)
@@ -82,7 +83,7 @@ class TestSegwit(CommandTestCase):
         tx = await self.blockchain.create_raw_transaction([
                 {"txid": p2sh_txid2, "vout": 0},
                 {"txid": bech32_txid2, "vout": 0},
-            ], [{bech32_address3: '1.9'}]
+            ], {bech32_address3: 1.9}
         )
         tx = await self.blockchain.sign_raw_transaction_with_wallet(tx)
         bech32_txid3 = await self.blockchain.send_raw_transaction(tx)
@@ -94,12 +95,9 @@ class TestSegwit(CommandTestCase):
         tx = await self.blockchain.create_raw_transaction([
                 {"txid": p2sh_txid3, "vout": 0},
                 {"txid": bech32_txid3, "vout": 0},
-            ], [{address: '3.5'}]
+            ], {address: 3.5}
         )
         tx = await self.blockchain.sign_raw_transaction_with_wallet(tx)
         txid = await self.blockchain.send_raw_transaction(tx)
-        await self.on_transaction_id(txid)
-        await self.generate(1)
-        await self.on_transaction_id(txid)
+        await self.generate_and_wait(1, [txid])
         await self.assertBalance(self.account, '13.5')
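Note: the outputs argument to create_raw_transaction changes from a list of single-entry dicts with string amounts to one address-to-amount mapping with numeric values, which appears to match the JSON shape lbcd's createrawtransaction expects (txid_in below is a placeholder of mine):

    tx = await self.blockchain.create_raw_transaction(
        [{"txid": txid_in, "vout": 0}],  # inputs
        {address: 3.5},                  # outputs: {address: amount in LBC}
    )

Likewise, generate_and_wait(blocks, txids) replaces the old wait / mine / wait-again sequence that followed send_raw_transaction.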


@@ -1,7 +1,7 @@
 import asyncio
 import random
-from itertools import chain
+import lbry.wallet.rpc.jsonrpc

 from lbry.wallet.transaction import Transaction, Output, Input
 from lbry.testcase import IntegrationTestCase
 from lbry.wallet.util import satoshis_to_coins, coins_to_satoshis
@@ -9,9 +9,8 @@ from lbry.wallet.manager import WalletManager

 class BasicTransactionTests(IntegrationTestCase):

     async def test_variety_of_transactions_and_longish_history(self):
-        await self.blockchain.generate(300)
+        await self.generate(300)
         await self.assertBalance(self.account, '0.0')
         addresses = await self.account.receiving.get_addresses()
@@ -19,10 +18,10 @@ class BasicTransactionTests(IntegrationTestCase):
         # to the 10th receiving address for a total of 30 UTXOs on the entire account
         for i in range(10):
             notification = asyncio.ensure_future(self.on_address_update(addresses[i]))
-            txid = await self.blockchain.send_to_address(addresses[i], 10)
+            _ = await self.send_to_address_and_wait(addresses[i], 10)
             await notification
             notification = asyncio.ensure_future(self.on_address_update(addresses[9]))
-            txid = await self.blockchain.send_to_address(addresses[9], 10)
+            _ = await self.send_to_address_and_wait(addresses[9], 10)
             await notification

         # use batching to reduce issues with send_to_address on cli
@@ -57,7 +56,7 @@ class BasicTransactionTests(IntegrationTestCase):
             for tx in await self.ledger.db.get_transactions(txid__in=[tx.id for tx in txs])
         ]))

-        await self.blockchain.generate(1)
+        await self.generate(1)
         await asyncio.wait([self.ledger.wait(tx) for tx in txs])
         await self.assertBalance(self.account, '199.99876')
@@ -74,7 +73,7 @@ class BasicTransactionTests(IntegrationTestCase):
         )
         await self.broadcast(tx)
         await self.ledger.wait(tx)
-        await self.blockchain.generate(1)
+        await self.generate(1)
         await self.ledger.wait(tx)

         self.assertEqual(2, await self.account.get_utxo_count())  # 199 + change
@@ -88,12 +87,10 @@ class BasicTransactionTests(IntegrationTestCase):
         await self.assertBalance(account2, '0.0')

         addresses = await account1.receiving.get_addresses()
-        txids = await asyncio.gather(*(
-            self.blockchain.send_to_address(address, 1.1) for address in addresses[:5]
-        ))
-        await asyncio.wait([self.on_transaction_id(txid) for txid in txids])  # mempool
-        await self.blockchain.generate(1)
-        await asyncio.wait([self.on_transaction_id(txid) for txid in txids])  # confirmed
+        txids = []
+        for address in addresses[:5]:
+            txids.append(await self.send_to_address_and_wait(address, 1.1))
+        await self.generate_and_wait(1, txids)
         await self.assertBalance(account1, '5.5')
         await self.assertBalance(account2, '0.0')
@@ -107,7 +104,7 @@ class BasicTransactionTests(IntegrationTestCase):
         )
         await self.broadcast(tx)
         await self.ledger.wait(tx)  # mempool
-        await self.blockchain.generate(1)
+        await self.generate(1)
         await self.ledger.wait(tx)  # confirmed

         await self.assertBalance(account1, '3.499802')
@@ -121,7 +118,7 @@ class BasicTransactionTests(IntegrationTestCase):
         )
         await self.broadcast(tx)
         await self.ledger.wait(tx)  # mempool
-        await self.blockchain.generate(1)
+        await self.generate(1)
         await self.ledger.wait(tx)  # confirmed

         tx = (await account1.get_transactions(include_is_my_input=True, include_is_my_output=True))[1]
@@ -133,11 +130,11 @@ class BasicTransactionTests(IntegrationTestCase):
         self.assertTrue(tx.outputs[1].is_internal_transfer)

     async def test_history_edge_cases(self):
-        await self.blockchain.generate(300)
+        await self.generate(300)
         await self.assertBalance(self.account, '0.0')
         address = await self.account.receiving.get_or_create_usable_address()
         # evil trick: mempool is unsorted on real life, but same order between python instances. reproduce it
-        original_summary = self.conductor.spv_node.server.bp.mempool.transaction_summaries
+        original_summary = self.conductor.spv_node.server.mempool.transaction_summaries

         def random_summary(*args, **kwargs):
             summary = original_summary(*args, **kwargs)
@@ -146,13 +143,10 @@ class BasicTransactionTests(IntegrationTestCase):
             while summary == ordered:
                 random.shuffle(summary)
             return summary
-        self.conductor.spv_node.server.bp.mempool.transaction_summaries = random_summary
+        self.conductor.spv_node.server.mempool.transaction_summaries = random_summary
         # 10 unconfirmed txs, all from blockchain wallet
-        sends = [self.blockchain.send_to_address(address, 10) for _ in range(10)]
-        # use batching to reduce issues with send_to_address on cli
-        for batch in range(0, len(sends), 10):
-            txids = await asyncio.gather(*sends[batch:batch + 10])
-            await asyncio.wait([self.on_transaction_id(txid) for txid in txids])
+        for i in range(10):
+            await self.send_to_address_and_wait(address, 10)
         remote_status = await self.ledger.network.subscribe_address(address)
         self.assertTrue(await self.ledger.update_history(address, remote_status))
         # 20 unconfirmed txs, 10 from blockchain, 10 from local to local
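The `random_summary` patch above deliberately de-sorts the server's mempool summaries: real mempools are unordered, but two python processes running the same code would otherwise report an identical order, hiding sorting bugs on the client side. The same monkeypatching idea can be written generically; a sketch under the assumption that the wrapped callable returns a shuffleable list (names here are illustrative, not from the diff):

    import random

    def make_unordered(original):
        # wrap a callable so its list result comes back in a different order
        # than it arrived in; bounded so identical elements cannot spin forever
        def shuffled(*args, **kwargs):
            summary = original(*args, **kwargs)
            ordered = list(summary)
            attempts = 0
            while summary == ordered and attempts < 100:
                random.shuffle(summary)
                attempts += 1
            return summary
        return shuffled

The test inlines this as `random_summary` and assigns it over `server.mempool.transaction_summaries`.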
@@ -170,8 +164,7 @@ class BasicTransactionTests(IntegrationTestCase):
         remote_status = await self.ledger.network.subscribe_address(address)
         self.assertTrue(await self.ledger.update_history(address, remote_status))
         # server history grows unordered
-        txid = await self.blockchain.send_to_address(address, 1)
-        await self.on_transaction_id(txid)
+        await self.send_to_address_and_wait(address, 1)
         self.assertTrue(await self.ledger.update_history(address, remote_status))
         self.assertEqual(21, len((await self.ledger.get_local_status_and_history(address))[1]))
         self.assertEqual(0, len(self.ledger._known_addresses_out_of_sync))
@@ -195,37 +188,37 @@ class BasicTransactionTests(IntegrationTestCase):
             self.ledger, 2000000000000, [self.account], set_reserved=False, return_insufficient_funds=True
         )
         got_amounts = [estimator.effective_amount for estimator in spendable]
-        self.assertListEqual(amounts, got_amounts)
+        self.assertListEqual(sorted(amounts), sorted(got_amounts))
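Wrapping both sides in `sorted()` makes the assertion order-insensitive: the estimator is only expected to return the same set of effective amounts, not the same sequence. For example, this now passes where the old assertion could fail:

    self.assertListEqual(sorted([1.0, 3.0, 5.0]), sorted([5.0, 1.0, 3.0]))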
     async def test_sqlite_coin_chooser(self):
         wallet_manager = WalletManager([self.wallet], {self.ledger.get_id(): self.ledger})
-        await self.blockchain.generate(300)
+        await self.generate(300)
         await self.assertBalance(self.account, '0.0')
         address = await self.account.receiving.get_or_create_usable_address()
         other_account = self.wallet.generate_account(self.ledger)
         other_address = await other_account.receiving.get_or_create_usable_address()
         self.ledger.coin_selection_strategy = 'sqlite'
-        await self.ledger.subscribe_account(self.account)
+        await self.ledger.subscribe_account(other_account)

         accepted = asyncio.ensure_future(self.on_address_update(address))
-        txid = await self.blockchain.send_to_address(address, 1.0)
+        _ = await self.send_to_address_and_wait(address, 1.0)
         await accepted

         accepted = asyncio.ensure_future(self.on_address_update(address))
-        txid = await self.blockchain.send_to_address(address, 1.0)
+        _ = await self.send_to_address_and_wait(address, 1.0)
         await accepted

         accepted = asyncio.ensure_future(self.on_address_update(address))
-        txid = await self.blockchain.send_to_address(address, 3.0)
+        _ = await self.send_to_address_and_wait(address, 3.0)
         await accepted

         accepted = asyncio.ensure_future(self.on_address_update(address))
-        txid = await self.blockchain.send_to_address(address, 5.0)
+        _ = await self.send_to_address_and_wait(address, 5.0)
         await accepted

         accepted = asyncio.ensure_future(self.on_address_update(address))
-        txid = await self.blockchain.send_to_address(address, 10.0)
+        _ = await self.send_to_address_and_wait(address, 10.0)
         await accepted

         await self.assertBalance(self.account, '20.0')
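Each funding step above follows the same race-free pattern: arm the listener for the address update before sending, so the notification cannot arrive in the gap between the send and the wait. Condensed into a hypothetical helper (not part of the diff):

    async def fund_and_wait_for_update(self, address, amount):
        # subscribe first, then send, then await the already-armed listener
        accepted = asyncio.ensure_future(self.on_address_update(address))
        await self.send_to_address_and_wait(address, amount)
        await accepted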
@@ -266,6 +259,12 @@ class BasicTransactionTests(IntegrationTestCase):
         async def broadcast(tx):
             try:
                 return await real_broadcast(tx)
+            except lbry.wallet.rpc.jsonrpc.RPCError as err:
+                # this is expected in tests where we try to double spend.
+                if 'the transaction was rejected by network rules.' in str(err):
+                    pass
+                else:
+                    raise err
             finally:
                 e.set()
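With the new `except` clause, the wrapped `broadcast` swallows only the specific lbcd rejection produced by a deliberate double spend and returns `None`, while the `finally` still sets the event, so the test resumes either way. A hypothetical call site (names assumed, not shown in this hunk):

    self.ledger.network.broadcast = broadcast
    result = await self.ledger.network.broadcast(double_spend_tx)  # rejected, result is None
    await e.wait()  # set in the finally block whether or not broadcast succeeded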


@@ -1,7 +1,7 @@
 import asyncio
 from decimal import Decimal
 from time import time
-from lbry.schema.claim import Claim
+from scribe.schema.claim import Claim
 from lbry.extras.daemon.exchange_rate_manager import (
     ExchangeRate, ExchangeRateManager, CurrencyConversionError,
     BittrexUSDFeed, BittrexBTCFeed,


@@ -1,5 +1,5 @@
 import unittest

-from lbry.schema import mime_types
+from scribe.schema import mime_types

 class TestMimeTypes(unittest.TestCase):

Some files were not shown because too many files have changed in this diff.