Merge branch 'master' into improve-install-md

commit 3c89ecafdd
Alex Grin, 2021-07-19 14:20:39 -04:00 (committed by GitHub)
9 changed files with 118 additions and 6 deletions


@@ -13,6 +13,7 @@ RUN apt-get update && \
       wget \
       tar unzip \
       build-essential \
+      automake libtool \
       pkg-config \
       libleveldb-dev \
       python3.7 \


@@ -11,6 +11,7 @@ ENV DOCKER_TAG=$DOCKER_TAG DOCKER_COMMIT=$DOCKER_COMMIT
 RUN apt-get update && \
     apt-get -y --no-install-recommends install \
       wget \
+      automake libtool \
       tar unzip \
       build-essential \
       pkg-config \


@@ -481,6 +481,10 @@ class UPnPComponent(Component):
             log.info("external ip changed from %s to %s", self.external_ip, external_ip)
         if external_ip:
             self.external_ip = external_ip
+            dht_component = self.component_manager.get_component(DHT_COMPONENT)
+            if dht_component:
+                dht_node = dht_component.component
+                dht_node.protocol.external_ip = external_ip
         # assert self.external_ip is not None  # TODO: handle going/starting offline
         if not self.upnp_redirects and self.upnp:  # setup missing redirects
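Note on the UPnPComponent change: once the router reports a new external IP, the DHT node would otherwise keep advertising the stale address to peers, so the added lines push the fresh IP into the running DHT protocol. A minimal sketch of the pattern with stand-in classes (illustrative only; not the lbry ComponentManager/Node API):

    # Stand-in classes; illustrative only, not the lbry API.
    class FakeProtocol:
        def __init__(self):
            self.external_ip = None

    class FakeDHTNode:
        def __init__(self):
            self.protocol = FakeProtocol()

    def on_external_ip_changed(dht_node, new_ip):
        # Without this, the DHT keeps advertising the stale address
        # to peers after the router's external IP changes.
        if dht_node:
            dht_node.protocol.external_ip = new_ip

    node = FakeDHTNode()
    on_external_ip_changed(node, "203.0.113.7")
    assert node.protocol.external_ip == "203.0.113.7"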


@@ -186,7 +186,7 @@ class UPbitBTCFeed(MarketFeed):
     params = {"markets": "BTC-LBC"}
 
     def get_rate_from_response(self, json_response):
-        if len(json_response) != 1 or 'trade_price' not in json_response[0]:
+        if "error" in json_response or len(json_response) != 1 or 'trade_price' not in json_response[0]:
             raise InvalidExchangeRateResponseError(self.name, 'result not found')
         return 1.0 / float(json_response[0]['trade_price'])
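Note on the UPbitBTCFeed change: on failure UPbit returns a JSON object rather than the expected one-element list. A one-key object passes the len(json_response) != 1 test, and json_response[0] then raises KeyError instead of the intended InvalidExchangeRateResponseError; testing for "error" first short-circuits before any indexing. A small repro of the failure mode, assuming an error payload of that shape (assumed, not taken from UPbit's docs):

    # Hypothetical error payload (assumed shape, not from UPbit's docs).
    error_response = {"error": {"name": "invalid_query_payload"}}

    # Old check: a one-key dict makes len(...) != 1 False, so evaluation
    # falls through to error_response[0], raising KeyError instead of the
    # intended InvalidExchangeRateResponseError.
    try:
        rejected = len(error_response) != 1 or 'trade_price' not in error_response[0]
    except KeyError:
        rejected = False  # the old code blew up before reaching its own error path

    # New check short-circuits on "error" before any indexing happens.
    rejected = "error" in error_response or len(error_response) != 1 \
        or 'trade_price' not in error_response[0]
    assert rejected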


@@ -96,6 +96,7 @@ class AddressManager:
         return [r['address'] for r in records]
 
     async def get_or_create_usable_address(self) -> str:
-        addresses = await self.get_addresses(only_usable=True, limit=10)
-        if addresses:
-            return random.choice(addresses)
+        async with self.address_generator_lock:
+            addresses = await self.get_addresses(only_usable=True, limit=10)
+            if addresses:
+                return random.choice(addresses)
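Note on the AddressManager change: the usable-address query used to run outside address_generator_lock, so it could interleave with ensure_address_gap() and observe the address table mid-generation. Moving the read under the lock serializes it with generation; the regression test added below exercises exactly this. A toy model of the hazard with stand-in names (not the lbry wallet API):

    import asyncio

    async def main():
        lock = asyncio.Lock()
        addresses = []

        async def generate():
            async with lock:
                addresses.append("pending")   # intermediate state
                await asyncio.sleep(0)        # generator yields mid-update
                addresses[-1] = "usable"

        async def read_unsafely():
            return list(addresses)            # no lock: can observe "pending"

        async def read_safely():
            async with lock:                  # serialized with generate()
                return list(addresses)

        task = asyncio.create_task(generate())
        await asyncio.sleep(0)                # let generate() reach its yield
        print(await read_unsafely())          # ['pending']  <- the bug's shape
        print(await read_safely())            # ['usable'] (waits for the lock)
        await task

    asyncio.run(main())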


@@ -1,3 +1,19 @@
+"""A simple script that attempts to directly download a single blob.
+
+To Do:
+------
+Currently `lbrynet blob get <hash>` does not work to download single blobs
+which are not already present in the system. The function locks up and
+never returns.
+It only works for blobs that are in the `blobfiles` directory already.
+This bug is reported in lbryio/lbry-sdk, issue #2070.
+
+Maybe this script can be investigated, and certain parts can be added to
+`lbry.extras.daemon.daemon.jsonrpc_blob_get`
+in order to solve the previous issue, and finally download single blobs
+from the network (peers or reflector servers).
+"""
+
 import sys
 import os
 import asyncio
@@ -47,7 +63,11 @@ async def main(blob_hash: str, url: str):
         print(f"deleted {blob_hash}")
 
 
-if __name__ == "__main__":  # usage: python download_blob_from_peer.py <blob_hash> [host url:port]
+if __name__ == "__main__":
+    if len(sys.argv) < 2:
+        print("usage: download_blob_from_peer.py <blob_hash> [host_url:port]")
+        sys.exit(1)
+
     url = 'reflector.lbry.com:5567'
     if len(sys.argv) > 2:
         url = sys.argv[2]
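With the argument check in place, running the script without a blob hash now prints usage and exits instead of crashing with an IndexError on sys.argv[1]. Example invocations (assuming the script is kept under scripts/, as its usage string suggests):

    python scripts/download_blob_from_peer.py
    python scripts/download_blob_from_peer.py <blob_hash> reflector.lbry.com:5567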


@@ -0,0 +1,71 @@
+import asyncio
+
+from aiohttp import web
+
+from lbry.blob_exchange.serialization import BlobRequest, BlobResponse
+from lbry.dht.constants import generate_id
+from lbry.dht.node import Node
+from lbry.dht.peer import make_kademlia_peer, PeerManager
+from lbry.extras.daemon.storage import SQLiteStorage
+
+loop = asyncio.get_event_loop()
+NODE = Node(
+    loop, PeerManager(loop), generate_id(), 60600, 60600, 3333, None,
+    storage=SQLiteStorage(None, ":memory:", loop, loop.time)
+)
+
+
+async def check_p2p(ip, port):
+    writer = None
+    try:
+        reader, writer = await asyncio.open_connection(ip, port)
+        writer.write(BlobRequest.make_request_for_blob_hash('0'*96).serialize())
+        return BlobResponse.deserialize(await reader.readuntil(b'}')).get_address_response().lbrycrd_address
+    except OSError:
+        return None
+    finally:
+        if writer:
+            writer.close()
+            await writer.wait_closed()
+
+
+async def check_dht(ip, port):
+    peer = make_kademlia_peer(None, ip, udp_port=int(port))
+    return await NODE.protocol.get_rpc_peer(peer).ping()
+
+
+async def endpoint_p2p(request):
+    p2p_port = request.match_info.get('p2p_port', "3333")
+    try:
+        address = await asyncio.wait_for(check_p2p(request.remote, p2p_port), 3)
+    except asyncio.TimeoutError:
+        address = None
+    return {"status": address is not None, "port": p2p_port, "payment_address": address}
+
+
+async def endpoint_dht(request):
+    dht_port = request.match_info.get('dht_port', "3333")
+    try:
+        response = await check_dht(request.remote, dht_port)
+    except asyncio.TimeoutError:
+        response = None
+    return {"status": response == b'pong', "port": dht_port}
+
+
+async def endpoint_default(request):
+    return {"dht_status": await endpoint_dht(request), "p2p_status": await endpoint_p2p(request)}
+
+
+def as_json_response_wrapper(endpoint):
+    async def json_endpoint(*args, **kwargs):
+        return web.json_response(await endpoint(*args, **kwargs))
+    return json_endpoint
+
+
+app = web.Application()
+app.add_routes([web.get('/', as_json_response_wrapper(endpoint_default)),
+                web.get('/dht/{dht_port}', as_json_response_wrapper(endpoint_dht)),
+                web.get('/p2p/{p2p_port}', as_json_response_wrapper(endpoint_p2p))])
+
+if __name__ == '__main__':
+    loop.create_task(NODE.start_listening("0.0.0.0"))
+    web.run_app(app, port=60666)
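The new file above is a small connectivity checker: it serves JSON over HTTP on port 60666 and, for each request, dials back to the caller's P2P (TCP blob) port or pings its DHT (UDP) port. A minimal client sketch, assuming the checker is running locally (the endpoint paths come from the routes registered above):

    import asyncio
    import aiohttp

    async def main():
        async with aiohttp.ClientSession() as session:
            # "/" runs both checks; the other two take the port to probe.
            for path in ("/", "/dht/4444", "/p2p/3333"):
                async with session.get(f"http://127.0.0.1:60666{path}") as resp:
                    print(path, await resp.json())

    asyncio.run(main())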


@@ -1,4 +1,5 @@
 import asyncio
+import time
 import unittest
 import typing
 from lbry.testcase import AsyncioTestCase
@@ -92,11 +93,11 @@ class TestNodePingQueueDiscover(AsyncioTestCase):
 
 
 class TestTemporarilyLosingConnection(AsyncioTestCase):
-    @unittest.SkipTest
+    TIMEOUT = None  # not supported as it advances time
     async def test_losing_connection(self):
         async def wait_for(check_ok, insist, timeout=20):
-            start = loop.time()
-            while loop.time() - start < timeout:
+            start = time.time()
+            while time.time() - start < timeout:
                 if check_ok():
                     break
                 await asyncio.sleep(0)
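Note on the test change: this test advances the event loop's clock, so a deadline measured with loop.time() is exhausted the moment the harness skips ahead, and the harness's own TIMEOUT would fire for the same reason (hence TIMEOUT = None). Wall-clock time.time() still measures real elapsed time. A toy illustration with a stand-in clock (not the actual test loop):

    import time

    class FakeAdvancingLoop:
        def __init__(self):
            self._now = 0.0
        def time(self):
            return self._now
        def advance(self, seconds):
            self._now += seconds  # what a time-advancing test loop does

    loop = FakeAdvancingLoop()
    start_loop, start_wall = loop.time(), time.time()
    loop.advance(3600)               # an hour of loop time passes instantly
    print(loop.time() - start_loop)  # 3600.0 -> a loop-time deadline has "expired"
    print(time.time() - start_wall)  # ~0.0   -> a wall-clock deadline has not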


@@ -1,3 +1,4 @@
+import asyncio
 from binascii import hexlify
 from lbry.testcase import AsyncioTestCase
 from lbry.wallet import Wallet, Ledger, Database, Headers, Account, SingleKey, HierarchicalDeterministic
@@ -37,6 +38,18 @@ class TestAccount(AsyncioTestCase):
         addresses = await account.change.get_addresses()
         self.assertEqual(len(addresses), 6)
 
+    async def test_unused_address_on_account_creation_does_not_cause_a_race(self):
+        account = Account.generate(self.ledger, Wallet(), 'lbryum')
+        await account.ledger.db.db.executescript("update pubkey_address set used_times=10")
+        await account.receiving.address_generator_lock.acquire()
+        delayed1 = asyncio.ensure_future(account.receiving.ensure_address_gap())
+        delayed = asyncio.ensure_future(account.receiving.get_or_create_usable_address())
+        await asyncio.sleep(0)
+        # wallet being created and queried at the same time
+        account.receiving.address_generator_lock.release()
+        await delayed1
+        await delayed
+
     async def test_generate_keys_over_batch_threshold_saves_it_properly(self):
         account = Account.generate(self.ledger, Wallet(), 'lbryum')
         async with account.receiving.address_generator_lock:
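Note on the new test: it reproduces the race deterministically by marking every address as used, holding address_generator_lock manually, starting ensure_address_gap() and get_or_create_usable_address() as concurrent tasks, yielding once so both queue on the lock, and then releasing it; before the fix, the usable-address read did not wait on the lock and could interleave with generation. The generic shape of that choreography, with stand-in coroutines:

    import asyncio

    async def main():
        lock = asyncio.Lock()
        order = []

        async def contender(name):
            async with lock:
                order.append(name)

        await lock.acquire()                     # test holds the lock up front
        first = asyncio.ensure_future(contender("ensure_address_gap"))
        second = asyncio.ensure_future(contender("get_or_create_usable_address"))
        await asyncio.sleep(0)                   # both contenders queue on the lock
        lock.release()                           # start the controlled race
        await first
        await second
        assert order == ["ensure_address_gap", "get_or_create_usable_address"]

    asyncio.run(main())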