Merge branch 'fix-update-stream'

[API] add `aioupnp_version` to `upnp` field in the response to `status`
[API] fix uncaught claim signature errors
Jack Robison 2018-10-26 16:49:51 -04:00
commit 6cb30f8e05
6 changed files with 19 additions and 8 deletions

@@ -30,7 +30,7 @@ class HTTPSHandler(logging.Handler):
     @defer.inlineCallbacks
     def _emit(self, record):
         payload = self.format(record)
-        response = yield treq.post(self.url, data=payload, cookies=self.cookies)
+        response = yield treq.post(self.url, data=payload.encode(), cookies=self.cookies)
         self.cookies.update(response.cookies())

     def emit(self, record):
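
The `data` argument to `treq.post` expects bytes, so the formatted log record (a str) has to be encoded before it can be used as the request body. A minimal standalone sketch of the pattern (the helper name and arguments are made up for illustration):

    # Not part of the diff -- illustrates why payload.encode() is needed.
    from twisted.internet import defer
    import treq

    @defer.inlineCallbacks
    def post_log(url, record_text, cookies=None):
        payload = record_text.encode()  # str -> bytes for the HTTP body
        response = yield treq.post(url, data=payload, cookies=cookies or {})
        defer.returnValue(response.cookies())  # carry the session cookies forward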

@@ -8,6 +8,7 @@ from hashlib import sha256
 from types import SimpleNamespace
 from twisted.internet import defer, threads, reactor, error, task
 import lbryschema
+from aioupnp import __version__ as aioupnp_version
 from aioupnp.upnp import UPnP
 from aioupnp.fault import UPnPError
 from lbrynet import conf
@@ -792,8 +793,9 @@ class UPnPComponent(Component):

     def get_status(self):
         return {
+            'aioupnp_version': aioupnp_version,
             'redirects': self.upnp_redirects,
-            'gateway': '' if not self.upnp else self.upnp.gateway.manufacturer_string,
+            'gateway': 'No gateway found' if not self.upnp else self.upnp.gateway.manufacturer_string,
             'dht_redirect_set': 'UDP' in self.upnp_redirects,
             'peer_redirect_set': 'TCP' in self.upnp_redirects,
             'external_ip': self.external_ip
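
With the new field in place, the `upnp` section of the `status` response takes roughly the following shape (field names come from this commit; the values below are invented for illustration):

    # Illustrative only -- not actual output.
    upnp_status = {
        'aioupnp_version': '0.0.0',               # newly added field
        'redirects': {'TCP': 3333, 'UDP': 4444},  # <protocol>: external port
        'gateway': 'No gateway found',            # or the gateway's manufacturer string
        'dht_redirect_set': True,                 # 'UDP' in redirects
        'peer_redirect_set': True,                # 'TCP' in redirects
        'external_ip': '1.2.3.4',
    }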

@@ -773,6 +773,7 @@ class Daemon(AuthJSONRPCServer):
                 'managed_files': (int) count of files in the file manager,
             },
             'upnp': {
+                'aioupnp_version': (str),
                 'redirects': {
                     <TCP | UDP>: (int) external_port,
                 },
@@ -2295,7 +2296,7 @@ class Daemon(AuthJSONRPCServer):
         except DecodeError as err:
             # there was a problem with a metadata field, raise an error here rather than
             # waiting to find out when we go to publish the claim (after having made the stream)
-            raise Exception("invalid publish metadata: %s" % err.message)
+            raise Exception(f"invalid publish metadata: {err}")

         certificate = None
         if channel_id or channel_name:
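
The old interpolation used `err.message`, which does not exist on Python 3 exceptions; formatting the exception object itself is the portable way to include its text. A quick standalone illustration:

    # Python 3 exceptions have no .message attribute; format the exception itself.
    try:
        raise ValueError("bad metadata field")
    except ValueError as err:
        print(f"invalid publish metadata: {err}")   # works
        # err.message would raise AttributeError here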

@@ -2,6 +2,7 @@ from decimal import Decimal
 from binascii import hexlify
 from datetime import datetime
 from json import JSONEncoder
+from ecdsa import BadSignatureError
 from lbrynet.wallet.transaction import Transaction, Output
 from lbrynet.wallet.dewies import dewies_to_lbc
 from lbrynet.wallet.ledger import MainNetLedger
@@ -68,9 +69,12 @@ class JSONResponseEncoder(JSONEncoder):
         output['valid_signature'] = None
         if txo.channel is not None:
             output['channel_name'] = txo.channel.claim_name
-            output['valid_signature'] = claim.validate_signature(
-                txo.get_address(self.ledger), txo.channel.claim
-            )
+            try:
+                output['valid_signature'] = claim.validate_signature(
+                    txo.get_address(self.ledger), txo.channel.claim
+                )
+            except BadSignatureError:
+                output['valid_signature'] = False

         if txo.script.is_claim_name:
             output['type'] = 'claim'
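
The signature check can raise `BadSignatureError` from the `ecdsa` package instead of returning False, which previously bubbled up uncaught; the new try/except reports it as an invalid signature. A minimal standalone sketch of that failure mode using `ecdsa` directly:

    # Not part of the diff -- shows that ecdsa raises rather than returning False.
    from ecdsa import SigningKey, BadSignatureError

    signing_key = SigningKey.generate()
    signature = signing_key.sign(b"claim data")

    try:
        valid = signing_key.verifying_key.verify(signature, b"tampered claim data")
    except BadSignatureError:
        valid = False
    print(valid)  # False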

@@ -709,7 +709,9 @@ class SQLiteStorage:
                 "select claim_id from claim where claim_outpoint=?", current_associated_content
             ).fetchone()[0]
             if current_associated_claim_id != new_claim_id:
-                raise Exception("invalid stream update")
+                raise Exception(
+                    f"mismatching claim ids when updating stream {current_associated_claim_id} vs {new_claim_id}"
+                )

         # update the claim associated to the file
         transaction.execute("insert or replace into content_claim values (?, ?)", (stream_hash, claim_outpoint))

@@ -334,7 +334,9 @@ class ContentClaimStorageTests(StorageTest):
         invalid_update_info['nout'] = 0
         invalid_update_info['claim_id'] = "beef0002" * 5
         invalid_update_outpoint = "%s:%i" % (invalid_update_info['txid'], invalid_update_info['nout'])
-        with self.assertRaisesRegex(Exception, "invalid stream update"):
+        with self.assertRaisesRegex(Exception, "mismatching claim ids when updating stream "
+                                               "deadbeefdeadbeefdeadbeefdeadbeefdeadbeef "
+                                               "vs beef0002beef0002beef0002beef0002beef0002"):
             yield self.storage.save_claims([invalid_update_info])
             yield self.storage.save_content_claim(stream_hash, invalid_update_outpoint)
         current_claim_info = yield self.storage.get_content_claim(stream_hash)