Merge branch 'master' into propagate_external_ip_change

This commit is contained in:
Alex Grin 2021-07-19 14:12:19 -04:00 committed by GitHub
commit 74116cc550
No known key found for this signature in database
GPG key ID: 4AEE18F83AFDEB23
11 changed files with 113 additions and 7 deletions

View file

@ -136,7 +136,6 @@ build:windows:
- pip install virtualenv pywin32 - pip install virtualenv pywin32
- virtualenv venv - virtualenv venv
- venv/Scripts/activate.ps1 - venv/Scripts/activate.ps1
- pip install pip==19.3.1; $true # $true ignores errors. need this to get the correct coincurve wheel. see commit notes for details.
after_script: after_script:
- rmdir -Recurse venv - rmdir -Recurse venv
script: script:

View file

@ -13,6 +13,7 @@ RUN apt-get update && \
wget \ wget \
tar unzip \ tar unzip \
build-essential \ build-essential \
automake libtool \
pkg-config \ pkg-config \
libleveldb-dev \ libleveldb-dev \
python3.7 \ python3.7 \

View file

@ -11,6 +11,7 @@ ENV DOCKER_TAG=$DOCKER_TAG DOCKER_COMMIT=$DOCKER_COMMIT
RUN apt-get update && \ RUN apt-get update && \
apt-get -y --no-install-recommends install \ apt-get -y --no-install-recommends install \
wget \ wget \
automake libtool \
tar unzip \ tar unzip \
build-essential \ build-essential \
pkg-config \ pkg-config \

View file

@ -186,7 +186,7 @@ class UPbitBTCFeed(MarketFeed):
params = {"markets": "BTC-LBC"} params = {"markets": "BTC-LBC"}
def get_rate_from_response(self, json_response): def get_rate_from_response(self, json_response):
if len(json_response) != 1 or 'trade_price' not in json_response[0]: if "error" in json_response or len(json_response) != 1 or 'trade_price' not in json_response[0]:
raise InvalidExchangeRateResponseError(self.name, 'result not found') raise InvalidExchangeRateResponseError(self.name, 'result not found')
return 1.0 / float(json_response[0]['trade_price']) return 1.0 / float(json_response[0]['trade_price'])

View file

@ -96,6 +96,7 @@ class AddressManager:
return [r['address'] for r in records] return [r['address'] for r in records]
async def get_or_create_usable_address(self) -> str: async def get_or_create_usable_address(self) -> str:
async with self.address_generator_lock:
addresses = await self.get_addresses(only_usable=True, limit=10) addresses = await self.get_addresses(only_usable=True, limit=10)
if addresses: if addresses:
return random.choice(addresses) return random.choice(addresses)

View file

@ -1,3 +1,19 @@
"""A simple script that attempts to directly download a single blob.
To Do:
------
Currently `lbrynet blob get <hash>` does not work to download single blobs
which are not already present in the system. The function locks up and
never returns.
It only works for blobs that are in the `blobfiles` directory already.
This bug is reported in lbryio/lbry-sdk, issue #2070.
Maybe this script can be investigated, and certain parts can be added to
`lbry.extras.daemon.daemon.jsonrpc_blob_get`
in order to solve the previous issue, and finally download single blobs
from the network (peers or reflector servers).
"""
import sys import sys
import os import os
import asyncio import asyncio
@ -47,7 +63,11 @@ async def main(blob_hash: str, url: str):
print(f"deleted {blob_hash}") print(f"deleted {blob_hash}")
if __name__ == "__main__": # usage: python download_blob_from_peer.py <blob_hash> [host url:port] if __name__ == "__main__":
if len(sys.argv) < 2:
print("usage: download_blob_from_peer.py <blob_hash> [host_url:port]")
sys.exit(1)
url = 'reflector.lbry.com:5567' url = 'reflector.lbry.com:5567'
if len(sys.argv) > 2: if len(sys.argv) > 2:
url = sys.argv[2] url = sys.argv[2]

View file

@ -2,7 +2,7 @@ import time
import asyncio import asyncio
import random import random
from argparse import ArgumentParser from argparse import ArgumentParser
from lbry.wallet.client.basenetwork import ClientSession from lbry.wallet.network import ClientSession
class AgentSmith(ClientSession): class AgentSmith(ClientSession):

View file

@ -1,5 +1,5 @@
import asyncio import asyncio
from lbry.wallet.client.basenetwork import ClientSession from lbry.wallet.network import ClientSession
from lbry.wallet.rpc.jsonrpc import RPCError from lbry.wallet.rpc.jsonrpc import RPCError
import logging import logging
import json import json

View file

@ -0,0 +1,71 @@
import asyncio
from aiohttp import web
from lbry.blob_exchange.serialization import BlobRequest, BlobResponse
from lbry.dht.constants import generate_id
from lbry.dht.node import Node
from lbry.dht.peer import make_kademlia_peer, PeerManager
from lbry.extras.daemon.storage import SQLiteStorage
# Shared event loop: drives both the standalone DHT node and the aiohttp app.
loop = asyncio.get_event_loop()
# Standalone DHT node used only to ping remote peers.  Storage is an
# in-memory SQLite DB since this checker service needs no persistence.
# NOTE(review): ports 60600/60600/3333 presumably are internal/external
# UDP and peer port defaults — confirm against Node's signature.
NODE = Node(
    loop, PeerManager(loop), generate_id(), 60600, 60600, 3333, None,
    storage=SQLiteStorage(None, ":memory:", loop, loop.time)
)
async def check_p2p(ip, port):
    """Probe a peer's blob (p2p) TCP port.

    Connects to ``ip:port``, sends a blob request for a dummy hash and
    reads the response up to the first ``}``.

    Returns:
        The lbrycrd payment address announced by the peer, or ``None``
        when the connection fails or the peer disconnects early.
    """
    writer = None
    try:
        reader, writer = await asyncio.open_connection(ip, port)
        # Any 96-char hex-length string works; we only care that the peer
        # answers with an address response, not about the blob itself.
        writer.write(BlobRequest.make_request_for_blob_hash('0' * 96).serialize())
        data = await reader.readuntil(b'}')
        return BlobResponse.deserialize(data).get_address_response().lbrycrd_address
    except (OSError, asyncio.IncompleteReadError):
        # IncompleteReadError is an EOFError, not an OSError: a peer that
        # accepts the connection but closes before sending '}' must be
        # reported as unreachable instead of crashing the HTTP handler.
        return None
    finally:
        if writer:
            writer.close()
            await writer.wait_closed()
async def check_dht(ip, port):
    """Ping ``ip:port`` over the DHT UDP protocol via the module-level NODE.

    Returns the raw ping response payload (``b'pong'`` on success).
    """
    kademlia_peer = make_kademlia_peer(None, ip, udp_port=int(port))
    rpc_peer = NODE.protocol.get_rpc_peer(kademlia_peer)
    return await rpc_peer.ping()
async def endpoint_p2p(request):
    """HTTP endpoint: check the caller's p2p (blob) port.

    The port comes from the route match (default "3333"); the probe is
    capped at 3 seconds.  Returns a plain dict for JSON serialization.
    """
    port = request.match_info.get('p2p_port', "3333")
    try:
        payment_address = await asyncio.wait_for(check_p2p(request.remote, port), 3)
    except asyncio.TimeoutError:
        payment_address = None
    return {
        "status": payment_address is not None,
        "port": port,
        "payment_address": payment_address,
    }
async def endpoint_dht(request):
    """HTTP endpoint: check the caller's DHT (UDP) port.

    The port comes from the route match (default "3333"); the ping is
    capped at 3 seconds.  Status is true only on an actual pong reply.
    """
    port = request.match_info.get('dht_port', "3333")
    try:
        pong = await asyncio.wait_for(check_dht(request.remote, port), 3)
    except asyncio.TimeoutError:
        pong = None
    return {"status": pong == b'pong', "port": port}
async def endpoint_default(request):
    """HTTP endpoint: run both the DHT and p2p checks for the caller."""
    dht_result = await endpoint_dht(request)
    p2p_result = await endpoint_p2p(request)
    return {"dht_status": dht_result, "p2p_status": p2p_result}
def as_json_response_wrapper(endpoint):
    """Wrap an async endpoint so its dict result becomes an aiohttp JSON response."""
    async def wrapped(*args, **kwargs):
        result = await endpoint(*args, **kwargs)
        return web.json_response(result)
    return wrapped
# Route table: "/" runs both checks; the other two probe a single
# caller-supplied port.
app = web.Application()
app.add_routes([web.get('/', as_json_response_wrapper(endpoint_default)),
                web.get('/dht/{dht_port}', as_json_response_wrapper(endpoint_dht)),
                web.get('/p2p/{p2p_port}', as_json_response_wrapper(endpoint_p2p))])

if __name__ == '__main__':
    # Start the DHT node on all interfaces before serving HTTP on 60666.
    loop.create_task(NODE.start_listening("0.0.0.0"))
    web.run_app(app, port=60666)

View file

@ -51,7 +51,7 @@ setup(
'docopt==0.6.2', 'docopt==0.6.2',
'hachoir', 'hachoir',
'multidict==4.6.1', 'multidict==4.6.1',
'coincurve==11.0.0', 'coincurve==15.0.0',
'pbkdf2==1.3', 'pbkdf2==1.3',
'attrs==18.2.0', 'attrs==18.2.0',
'pylru==1.1.0', 'pylru==1.1.0',

View file

@ -1,3 +1,4 @@
import asyncio
from binascii import hexlify from binascii import hexlify
from lbry.testcase import AsyncioTestCase from lbry.testcase import AsyncioTestCase
from lbry.wallet import Wallet, Ledger, Database, Headers, Account, SingleKey, HierarchicalDeterministic from lbry.wallet import Wallet, Ledger, Database, Headers, Account, SingleKey, HierarchicalDeterministic
@ -37,6 +38,18 @@ class TestAccount(AsyncioTestCase):
addresses = await account.change.get_addresses() addresses = await account.change.get_addresses()
self.assertEqual(len(addresses), 6) self.assertEqual(len(addresses), 6)
async def test_unused_address_on_account_creation_does_not_cause_a_race(self):
    """Regression test: generating the address gap and requesting a usable
    address concurrently must not race — both paths serialize on
    ``address_generator_lock``.
    """
    account = Account.generate(self.ledger, Wallet(), 'lbryum')
    # Mark every existing address as used so both coroutines below are
    # forced down the address-generation path.
    await account.ledger.db.db.executescript("update pubkey_address set used_times=10")
    # Hold the generator lock so both tasks queue up behind it.
    await account.receiving.address_generator_lock.acquire()
    delayed1 = asyncio.ensure_future(account.receiving.ensure_address_gap())
    delayed = asyncio.ensure_future(account.receiving.get_or_create_usable_address())
    # Yield once so both tasks start and block on the held lock.
    await asyncio.sleep(0)
    # wallet being created and queried at the same time
    account.receiving.address_generator_lock.release()
    await delayed1
    await delayed
async def test_generate_keys_over_batch_threshold_saves_it_properly(self): async def test_generate_keys_over_batch_threshold_saves_it_properly(self):
account = Account.generate(self.ledger, Wallet(), 'lbryum') account = Account.generate(self.ledger, Wallet(), 'lbryum')
async with account.receiving.address_generator_lock: async with account.receiving.address_generator_lock: