Merge branch 'master' into remove_tempblobmanager
commit 7b93073f7b
10 changed files with 94 additions and 29 deletions
@@ -12,8 +12,12 @@ at anytime.
  *
  *

### Added
  * Added validation of currencies.
  * Added blob_announce API command

### Fixed
  *
  * Fixed incorrect formatting of "amount" fields
  *

### Deprecated
@@ -48,6 +52,7 @@ at anytime.
  * Linux default downloads folder changed from `~/Downloads` to `XDG_DOWNLOAD_DIR`
  * Linux folders moved from the home directory to `~/.local/share/lbry`
  * Windows folders moved from `%AppData%/Roaming` to `%AppData%/Local/lbry`
  * Changed `claim_list_by_channel` to return the `claims_in_channel` count instead of the `claims_in_channel_pages` count

### Added
  * Add link to instructions on how to change the default peer port
@@ -191,7 +191,7 @@ Returns:
    If there was an error:
        'error': (str) error message

-       'claims_in_channel_pages': total number of pages with <page_size> results,
+       'claims_in_channel': the total number of results for the channel,

    If a page of results was requested:
        'returned_page': page number returned,
@@ -1,6 +1,6 @@
import logging

-__version__ = "0.14.2"
+__version__ = "0.14.3rc6"
version = tuple(__version__.split('.'))

logging.getLogger(__name__).addHandler(logging.NullHandler())
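For reference, the module-level `version` tuple right below the bump is built by splitting `__version__` on dots, so a pre-release suffix such as `rc6` stays attached to the last field. A quick standalone check (plain Python, nothing lbrynet-specific):

```python
# Standalone illustration of how the version tuple above is derived.
__version__ = "0.14.3rc6"
version = tuple(__version__.split('.'))
print(version)  # ('0', '14', '3rc6') -- the rc suffix rides along with the patch field
```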
@@ -8,6 +8,7 @@ import yaml
import envparse
from appdirs import user_data_dir, user_config_dir
from lbrynet.core import utils
from lbrynet.core.Error import InvalidCurrencyError

try:
    from lbrynet.winhelpers.knownpaths import get_path, FOLDERID, UserHandle
@@ -351,6 +352,10 @@ class Config(object):
        assert name not in self._fixed_defaults, \
            ValueError('{} is not an editable setting'.format(name))

    def _validate_currency(self, currency):
        if currency not in self._fixed_defaults['CURRENCIES'].keys():
            raise InvalidCurrencyError(currency)

    def get(self, name, data_type=None):
        """Get a config value
@@ -388,6 +393,10 @@ class Config(object):
        data types (e.g. PERSISTED values to save to a file, CLI values from parsed
        command-line options, etc), you can specify that with the data_types param
        """
        if name == "max_key_fee":
            currency = str(value["currency"]).upper()
            self._validate_currency(currency)

        self._assert_editable_setting(name)
        for data_type in data_types:
            self._assert_valid_data_type(data_type)
@@ -139,3 +139,9 @@ class InvalidAuthenticationToken(Exception):

class NegotiationError(Exception):
    pass


class InvalidCurrencyError(Exception):
    def __init__(self, currency):
        self.currency = currency
        Exception.__init__(
            self, 'Invalid currency: {} is not a supported currency.'.format(currency))
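Taken together, the two conf.py hunks and the new exception above add a currency check that runs whenever `max_key_fee` is set. A minimal self-contained sketch of that pattern follows; `MiniConfig` is a simplified stand-in invented for this example, not the real `Config` class from this diff.

```python
# Sketch of the validation flow added above. InvalidCurrencyError mirrors the
# new exception; MiniConfig is a stripped-down stand-in for lbrynet's Config.
class InvalidCurrencyError(Exception):
    def __init__(self, currency):
        self.currency = currency
        Exception.__init__(
            self, 'Invalid currency: {} is not a supported currency.'.format(currency))


class MiniConfig(object):
    def __init__(self, fixed_defaults):
        self._fixed_defaults = fixed_defaults
        self._data = {}

    def _validate_currency(self, currency):
        if currency not in self._fixed_defaults['CURRENCIES'].keys():
            raise InvalidCurrencyError(currency)

    def set(self, name, value):
        # max_key_fee is the only setting that carries a currency, so it is
        # the only one validated here, matching the hunk in Config.set above.
        if name == "max_key_fee":
            self._validate_currency(str(value["currency"]).upper())
        self._data[name] = value


config = MiniConfig({'CURRENCIES': {'BTC': {'type': 'crypto'}}})
config.set('max_key_fee', {'currency': 'BTC', 'amount': 1})    # accepted
print(config._data['max_key_fee'])
# config.set('max_key_fee', {'currency': 'XYZ', 'amount': 1})  # would raise InvalidCurrencyError
```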
@@ -363,7 +363,7 @@ class SqliteStorage(MetaDataStorage):
            " last_modified)"
            "VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?)",
            (claim_sequence, claim_id, claim_address, height, amount,
-            supports, serialized, channel_name, signature_is_valid, now))
+            supports, serialized, channel_name, signature_is_valid, now))
        defer.returnValue(None)

    @rerun_if_locked
@@ -376,7 +376,7 @@ class SqliteStorage(MetaDataStorage):

        if certificate_id:
            certificate_result = yield self.db.runQuery("SELECT row_id FROM claim_cache "
-                                                        "WHERE claim_id=?", (certificate_id, ))
+                                                        "WHERE claim_id=?", (certificate_id, ))
        if certificate_id is not None and certificate_result is None:
            log.warning("Certificate is not in cache")
        elif certificate_result:
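Both storage hunks use sqlite's positional `?` placeholders rather than string formatting. Below is a tiny standalone illustration of that query style with the standard library's sqlite3 module; the table and columns are invented for the example and are not the real claim_cache schema.

```python
import sqlite3

# Invented schema, used only to show the positional-placeholder style above.
conn = sqlite3.connect(":memory:")
conn.execute("CREATE TABLE claim_cache (row_id INTEGER PRIMARY KEY, claim_id TEXT, amount TEXT)")
conn.execute("INSERT INTO claim_cache (claim_id, amount) VALUES (?, ?)", ("abc123", "1.5"))
row = conn.execute("SELECT row_id FROM claim_cache WHERE claim_id=?", ("abc123",)).fetchone()
print(row)  # (1,)
```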
@@ -1338,8 +1338,7 @@ class Daemon(AuthJSONRPCServer):
            claim_results = yield self.session.wallet.get_claim_by_outpoint(outpoint)
        else:
            raise Exception("Must specify either txid/nout, or claim_id")
-       response = yield self._render_response(claim_results)
+       result = format_json_out_amount_as_float(claim_results)
+       response = yield self._render_response(result)
        defer.returnValue(response)

    @AuthJSONRPCServer.auth_required
@@ -1961,7 +1960,6 @@ class Daemon(AuthJSONRPCServer):
        """

        d = self.session.wallet.get_name_claims()
        d.addCallback(format_json_out_amount_as_float)
        d.addCallback(lambda claims: self._render_response(claims))
        return d
@@ -2019,7 +2017,7 @@
    If there was an error:
        'error': (str) error message

-       'claims_in_channel_pages': total number of pages with <page_size> results,
+       'claims_in_channel': the total number of results for the channel,

    If a page of results was requested:
        'returned_page': page number returned,
@@ -2077,7 +2075,7 @@
                results[u] = resolved[u]
            else:
                results[u] = {
-                   'claims_in_channel_pages': resolved[u]['claims_in_channel_pages']
+                   'claims_in_channel': resolved[u]['claims_in_channel']
                }
            if page:
                results[u]['returned_page'] = page
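Read together with the docstring hunk above, the per-URI result for a channel now reports the raw result count instead of the page count. An illustrative shape, with invented values:

```python
# Invented values, shown only to illustrate the new per-URI result shape.
results = {
    "@example-channel": {
        "claims_in_channel": 42,  # total results for the channel (was claims_in_channel_pages)
        "returned_page": 1,       # included only when a specific page was requested
    }
}
```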
@@ -2351,6 +2349,53 @@
        d.addCallback(lambda r: self._render_response(r))
        return d

    @defer.inlineCallbacks
    @AuthJSONRPCServer.flags(announce_all="-a")
    def jsonrpc_blob_announce(self, announce_all=None, blob_hash=None,
                              stream_hash=None, sd_hash=None):
        """
        Announce blobs to the DHT

        Usage:
            blob_announce [-a] [<blob_hash> | --blob_hash=<blob_hash>]
                          [<stream_hash> | --stream_hash=<stream_hash>]
                          [<sd_hash> | --sd_hash=<sd_hash>]

        Options:
            -a                                          : announce all the blobs possessed by user
            <blob_hash>, --blob_hash=<blob_hash>        : announce a blob, specified by blob_hash
            <stream_hash>, --stream_hash=<stream_hash>  : announce all blobs associated with
                                                          stream_hash
            <sd_hash>, --sd_hash=<sd_hash>              : announce all blobs associated with
                                                          sd_hash and the sd_hash itself

        Returns:
            (bool) true if successful
        """
        if announce_all:
            yield self.session.blob_manager.immediate_announce_all_blobs()
        elif blob_hash:
            blob_hashes = [blob_hash]
            yield self.session.blob_manager._immediate_announce(blob_hashes)
        elif stream_hash:
            blobs = yield self.get_blobs_for_stream_hash(stream_hash)
            blobs = [blob for blob in blobs if blob.is_validated()]
            blob_hashes = [blob.blob_hash for blob in blobs]
            yield self.session.blob_manager._immediate_announce(blob_hashes)
        elif sd_hash:
            blobs = yield self.get_blobs_for_sd_hash(sd_hash)
            blobs = [blob for blob in blobs if blob.is_validated()]
            blob_hashes = [blob.blob_hash for blob in blobs]
            blob_hashes.append(sd_hash)
            yield self.session.blob_manager._immediate_announce(blob_hashes)
        else:
            raise Exception('single argument must be specified')

        response = yield self._render_response(True)
        defer.returnValue(response)

    # TODO: This command should be deprecated in favor of blob_announce
    def jsonrpc_blob_announce_all(self):
        """
        Announce all blobs to the DHT
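The docstring above doubles as the command's usage text. As a rough sketch of how a client might exercise the new `blob_announce` command over the daemon's JSON-RPC interface; the endpoint URL, port, and the use of the `requests` library are assumptions made for illustration and are not taken from this diff:

```python
# Hypothetical client-side call; the endpoint URL/port and the requests
# dependency are assumptions for illustration, not part of this diff.
import requests

URL = "http://localhost:5279/lbryapi"  # assumed local daemon JSON-RPC endpoint

response = requests.post(URL, json={
    "method": "blob_announce",
    "params": {"sd_hash": "<sd_hash>"},  # or blob_hash / stream_hash, or announce_all
})
print(response.json())  # a successful announce should report a result of true
```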
@@ -2614,19 +2659,3 @@ def get_blob_payment_rate_manager(session, payment_rate_manager=None):
        payment_rate_manager = rate_managers[payment_rate_manager]
        log.info("Downloading blob with rate manager: %s", payment_rate_manager)
    return payment_rate_manager or session.payment_rate_manager


# lbryum returns json loadeable object with amounts as decimal encoded string,
# convert them into floats for the daemon
# TODO: daemon should also use decimal encoded string
def format_json_out_amount_as_float(obj):
    if isinstance(obj, dict):
        for k, v in obj.iteritems():
            if k == 'amount' or k == 'effective_amount':
                obj[k] = float(obj[k])
            if isinstance(v, (dict, list)):
                obj[k] = format_json_out_amount_as_float(v)

    elif isinstance(obj, list):
        obj = [format_json_out_amount_as_float(o) for o in obj]
    return obj
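For clarity, here is a standalone re-implementation of the amount conversion shown above, rewritten with `dict.items()` so it also runs on Python 3 (the daemon code uses Python 2's `iteritems()`):

```python
# Standalone mirror of format_json_out_amount_as_float, for illustration only.
def format_amounts_as_float(obj):
    if isinstance(obj, dict):
        for k, v in obj.items():
            if k in ('amount', 'effective_amount'):
                obj[k] = float(obj[k])  # decimal-encoded string -> float
            if isinstance(v, (dict, list)):
                obj[k] = format_amounts_as_float(v)
    elif isinstance(obj, list):
        obj = [format_amounts_as_float(o) for o in obj]
    return obj


claim = {'amount': '1.5', 'supports': [{'amount': '0.25'}]}
print(format_amounts_as_float(claim))
# {'amount': 1.5, 'supports': [{'amount': 0.25}]}
```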
@@ -14,7 +14,7 @@ jsonrpc==1.2
jsonrpclib==0.1.7
jsonschema==2.5.1
git+https://github.com/lbryio/lbryschema.git@v0.0.9#egg=lbryschema
-git+https://github.com/lbryio/lbryum.git@v3.1.4#egg=lbryum
+git+https://github.com/lbryio/lbryum.git@v3.1.5rc4#egg=lbryum
miniupnpc==1.9
pbkdf2==1.3
pycrypto==2.6.1
setup.py
@@ -21,7 +21,7 @@ requires = [
    'envparse',
    'jsonrpc',
    'jsonschema',
-   'lbryum==3.1.4',
+   'lbryum==3.1.5rc4',
    'lbryschema==0.0.9',
    'miniupnpc',
    'pycrypto',
@@ -1,8 +1,9 @@
import os
import json

from twisted.trial import unittest
from lbrynet import conf

from lbrynet.core.Error import InvalidCurrencyError


class SettingsTest(unittest.TestCase):
    def setUp(self):
@@ -54,6 +55,21 @@ class SettingsTest(unittest.TestCase):
        settings.set('test', 'runtime_takes_precedence', data_types=(conf.TYPE_RUNTIME,))
        self.assertEqual('runtime_takes_precedence', settings['test'])

    def test_max_key_fee_set(self):
        fixed_default = {'CURRENCIES': {'BTC': {'type': 'crypto'}}}
        adjustable_settings = {'max_key_fee': (json.loads, {'currency': 'USD', 'amount': 1})}
        env = conf.Env(**adjustable_settings)
        settings = conf.Config(fixed_default, adjustable_settings, environment=env)

        with self.assertRaises(InvalidCurrencyError):
            settings.set('max_key_fee', {'currency': 'USD', 'amount': 1})

        valid_setting = {'currency': 'BTC', 'amount': 1}
        settings.set('max_key_fee', valid_setting)
        out = settings.get('max_key_fee')
        self.assertEqual(out, valid_setting)

    def test_data_dir(self):
        # check if these directories are returned as string and not unicode
        # otherwise there will be problems when calling os.path.join on