Merge branch 'master' into reflector
commit 1c88cbc5f6
31 changed files with 729 additions and 433 deletions
@@ -1,5 +1,5 @@
[bumpversion]
current_version = 0.3.13
current_version = 0.3.17
commit = True
tag = True
message = Bump version: {current_version} -> {new_version}

@@ -4,5 +4,5 @@ log = logging.getLogger(__name__)
logging.getLogger(__name__).addHandler(logging.NullHandler())
log.setLevel(logging.INFO)

__version__ = "0.3.13"
__version__ = "0.3.17"
version = tuple(__version__.split('.'))

@@ -60,3 +60,5 @@ CURRENCIES = {
'LBC': {'type': 'crypto'},
'USD': {'type': 'fiat'},
}

LOGGLY_TOKEN = 'YWRmNGU4NmEtNjkwNC00YjM2LTk3ZjItMGZhODM3ZDhkYzBi'

@@ -1,9 +1,6 @@
import requests
import json
import time

from copy import deepcopy
from googlefinance import getQuotes
from lbrynet.conf import CURRENCIES
from lbrynet.core import utils
import logging

@@ -90,6 +87,10 @@ class LBRYFeeValidator(dict):

class Metadata(dict):
@classmethod
def load_from_hex(cls, metadata):
return cls(json.loads(metadata.decode('hex')))

def __init__(self, metadata):
dict.__init__(self)
self.meta_version = None

@@ -126,3 +127,9 @@ class Metadata(dict):
assert self.meta_version == self['ver'], "version mismatch"
break
assert metadata == {}, "Unknown metadata keys: %s" % json.dumps(metadata.keys())

def serialize(self):
return json.dumps(self).encode("hex")

def as_json(self):
return json.dumps(self)

@@ -6,7 +6,6 @@ import subprocess
import socket
import time
import os
import requests

from bitcoinrpc.authproxy import AuthServiceProxy, JSONRPCException
from twisted.internet import threads, reactor, defer, task

@@ -15,10 +14,9 @@ from twisted.enterprise import adbapi
from collections import defaultdict, deque
from zope.interface import implements
from decimal import Decimal
from googlefinance import getQuotes

from lbryum import SimpleConfig, Network
from lbryum.lbrycrd import COIN, TYPE_ADDRESS
from lbryum.lbrycrd import COIN
from lbryum.wallet import WalletStorage, Wallet
from lbryum.commands import known_commands, Commands
from lbryum.transaction import Transaction

@@ -27,8 +25,6 @@ from lbrynet.interfaces import IRequestCreator, IQueryHandlerFactory, IQueryHand
from lbrynet.core.client.ClientRequest import ClientRequest
from lbrynet.core.Error import UnknownNameError, InvalidStreamInfoError, RequestCanceledError
from lbrynet.core.Error import InsufficientFundsError
from lbrynet.core.sqlite_helpers import rerun_if_locked
from lbrynet.conf import SOURCE_TYPES
from lbrynet.core.LBRYMetadata import Metadata

log = logging.getLogger(__name__)

@@ -90,6 +86,7 @@ class LBRYWallet(object):
return True

d = self._open_db()
d.addCallback(lambda _: self._clean_bad_records())
d.addCallback(lambda _: self._start())
d.addCallback(lambda _: start_manage())
return d

@@ -324,6 +321,10 @@ class LBRYWallet(object):
for k in ['value', 'txid', 'n', 'height', 'amount']:
assert k in r, "getvalueforname response missing field %s" % k

def _log_success(claim_id):
log.info("lbry://%s complies with %s, claimid: %s", name, metadata.meta_version, claim_id)
return defer.succeed(None)

if 'error' in result:
log.warning("Got an error looking up a name: %s", result['error'])
return Failure(UnknownNameError(name))

@@ -335,55 +336,116 @@ class LBRYWallet(object):
except (ValueError, TypeError):
return Failure(InvalidStreamInfoError(name))

d = self._save_name_metadata(name, str(result['txid']), metadata['sources']['lbry_sd_hash'])
d.addCallback(lambda _: log.info("lbry://%s complies with %s" % (name, metadata.meta_version)))
txid = result['txid']
sd_hash = metadata['sources']['lbry_sd_hash']
d = self._save_name_metadata(name, txid, sd_hash)
d.addCallback(lambda _: self.get_claimid(name, txid))
d.addCallback(lambda cid: _log_success(cid))
d.addCallback(lambda _: metadata)
return d

def _get_claim_info(self, result, name):
def _check_result_fields(r):
for k in ['value', 'txid', 'n', 'height', 'amount']:
assert k in r, "getvalueforname response missing field %s" % k

def _build_response(m, result):
result['value'] = m
return result

if 'error' in result:
log.warning("Got an error looking up a name: %s", result['error'])
return Failure(UnknownNameError(name))

_check_result_fields(result)

try:
metadata = Metadata(json.loads(result['value']))
except (ValueError, TypeError):
return Failure(InvalidStreamInfoError(name))

d = self._save_name_metadata(name, str(result['txid']), metadata['sources']['lbry_sd_hash'])
d.addCallback(lambda _: log.info("lbry://%s complies with %s" % (name, metadata.meta_version)))
d.addCallback(lambda _: _build_response(metadata, result))
def get_claim(self, name, claim_id):
d = self.get_claims_for_name(name)
d.addCallback(lambda claims: next(claim for claim in claims['claims'] if claim['claimId'] == claim_id))
return d

def get_claim_info(self, name):
d = self._get_value_for_name(name)
d.addCallback(lambda r: self._get_claim_info(r, name))
def get_claimid(self, name, txid):
def _get_id_for_return(claim_id):
if claim_id:
return defer.succeed(claim_id)
else:
d = self.get_claims_from_tx(txid)
d.addCallback(lambda claims: next(c['claimId'] for c in claims if c['name'] == name))
d.addCallback(lambda cid: self._update_claimid(cid, name, txid))
return d

d = self._get_claimid_for_tx(name, txid)
d.addCallback(_get_id_for_return)
return d

def get_claim_info(self, name, txid=None):
if not txid:
d = self._get_value_for_name(name)
d.addCallback(lambda r: self._get_claim_info(name, r['txid']))
else:
d = self._get_claim_info(name, txid)
d.addErrback(lambda _: False)
return d

def _get_claim_info(self, name, txid):
def _build_response(claim):
result = {}
try:
metadata = Metadata(json.loads(claim['value']))
meta_ver = metadata.meta_version
sd_hash = metadata['sources']['lbry_sd_hash']
d = self._save_name_metadata(name, txid, sd_hash)
except AssertionError:
metadata = claim['value']
meta_ver = "Non-compliant"
d = defer.succeed(None)

claim_id = claim['claimId']
result['claim_id'] = claim_id
result['amount'] = claim['nEffectiveAmount']
result['height'] = claim['nHeight']
result['name'] = name
result['txid'] = txid
result['value'] = metadata
result['supports'] = [{'txid': support['txid'], 'n': support['n']} for support in claim['supports']]
result['meta_version'] = meta_ver

log.info("get claim info lbry://%s metadata: %s, claimid: %s", name, meta_ver, claim_id)

d.addCallback(lambda _: self.get_name_claims())
d.addCallback(lambda r: [c['txid'] for c in r])
d.addCallback(lambda my_claims: _add_is_mine(result, my_claims))
return d

def _add_is_mine(response, my_txs):
response['is_mine'] = response['txid'] in my_txs
return response

d = self.get_claimid(name, txid)
d.addCallback(lambda claim_id: self.get_claim(name, claim_id))
d.addCallback(_build_response)
return d

def get_claims_for_name(self, name):
d = self._get_claims_for_name(name)
return d

def update_metadata(self, new_metadata, old_metadata):
meta_for_return = old_metadata if isinstance(old_metadata, dict) else {}
for k in new_metadata:
meta_for_return[k] = new_metadata[k]
return defer.succeed(Metadata(meta_for_return))

def claim_name(self, name, bid, m):

metadata = Metadata(m)

d = self._send_name_claim(name, json.dumps(metadata), bid)

def _save_metadata(txid):
def _save_metadata(txid, metadata):
log.info("Saving metadata for claim %s" % txid)
d = self._save_name_metadata(name, txid, metadata['sources']['lbry_sd_hash'])
d.addCallback(lambda _: txid)
return d

d.addCallback(_save_metadata)
def _claim_or_update(claim, metadata, _bid):
if not claim:
log.info("No claim yet, making a new one")
return self._send_name_claim(name, metadata.as_json(), _bid)
if not claim['is_mine']:
log.info("Making a contesting claim")
return self._send_name_claim(name, metadata.as_json(), _bid)
else:
log.info("Updating over own claim")
d = self.update_metadata(metadata, claim['value'])
d.addCallback(lambda new_metadata: self._send_name_claim_update(name, claim['claim_id'], claim['txid'], new_metadata, _bid))
return d

meta = Metadata(m)

d = self.get_claim_info(name)
d.addCallback(lambda claim: _claim_or_update(claim, meta, bid))
d.addCallback(lambda txid: _save_metadata(txid, meta))
return d

def abandon_name(self, txid):

@@ -419,19 +481,14 @@ class LBRYWallet(object):
dl.addCallback(abandon)
return dl

def support_claim(self, name, claim_id, amount):
return self._support_claim(name, claim_id, amount)

def get_tx(self, txid):
d = self._get_raw_tx(txid)
d.addCallback(self._get_decoded_tx)
return d

def update_name(self, name, bid, value, old_txid):
d = self._get_value_for_name(name)
d.addCallback(lambda r: self.abandon_name(r['txid'] if not old_txid else old_txid))
d.addCallback(lambda r: log.info("Abandon claim tx %s" % str(r)))
d.addCallback(lambda _: self.claim_name(name, bid, value))

return d

def get_name_and_validity_for_sd_hash(self, sd_hash):
d = self._get_claim_metadata_for_sd_hash(sd_hash)
d.addCallback(lambda name_txid: self._get_status_of_claim(name_txid[1], name_txid[0], sd_hash) if name_txid is not None else None)

@@ -534,21 +591,45 @@ class LBRYWallet(object):
def _open_db(self):
self.db = adbapi.ConnectionPool('sqlite3', os.path.join(self.db_dir, "blockchainname.db"),
check_same_thread=False)
return self.db.runQuery("create table if not exists name_metadata (" +

def create_tables(transaction):
transaction.execute("create table if not exists name_metadata (" +
" name text, " +
" txid text, " +
" sd_hash text)")
transaction.execute("create table if not exists claim_ids (" +
" claimId text, " +
" name text, " +
" txid text)")

return self.db.runInteraction(create_tables)

def _clean_bad_records(self):
d = self.db.runQuery("delete from name_metadata where length(txid) > 64 or txid is null")
return d

def _save_name_metadata(self, name, txid, sd_hash):
d = self.db.runQuery("select * from name_metadata where name=? and txid=? and sd_hash=?", (name, txid, sd_hash))
d.addCallback(lambda r: self.db.runQuery("insert into name_metadata values (?, ?, ?)", (name, txid, sd_hash))
if not len(r) else None)

assert len(txid) == 64, "That's not a txid: %s" % str(txid)
d = self.db.runQuery("delete from name_metadata where name=? and txid=? and sd_hash=?", (name, txid, sd_hash))
d.addCallback(lambda _: self.db.runQuery("insert into name_metadata values (?, ?, ?)", (name, txid, sd_hash)))
return d

def _get_claim_metadata_for_sd_hash(self, sd_hash):
d = self.db.runQuery("select name, txid from name_metadata where sd_hash=?", (sd_hash,))
d.addCallback(lambda r: r[0] if len(r) else None)
d.addCallback(lambda r: r[0] if r else None)
return d

def _update_claimid(self, claim_id, name, txid):
assert len(txid) == 64, "That's not a txid: %s" % str(txid)
d = self.db.runQuery("delete from claim_ids where claimId=? and name=? and txid=?", (claim_id, name, txid))
d.addCallback(lambda r: self.db.runQuery("insert into claim_ids values (?, ?, ?)", (claim_id, name, txid)))
d.addCallback(lambda _: claim_id)
return d

def _get_claimid_for_tx(self, name, txid):
assert len(txid) == 64, "That's not a txid: %s" % str(txid)
d = self.db.runQuery("select claimId from claim_ids where name=? and txid=?", (name, txid))
d.addCallback(lambda r: r[0][0] if r else None)
return d

######### Must be overridden #########

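For reference, a minimal, self-contained sketch (not part of the diff) of the delete-then-insert upsert pattern that _update_claimid and the reworked _save_name_metadata use above, shown against an in-memory SQLite table; the plain-function names here are hypothetical stand-ins for the adbapi-based methods.

import sqlite3

conn = sqlite3.connect(":memory:")
conn.execute("create table if not exists claim_ids (claimId text, name text, txid text)")

def update_claimid(claim_id, name, txid):
    assert len(txid) == 64, "That's not a txid: %s" % str(txid)
    # drop any stale row for this triple, then insert the fresh one
    conn.execute("delete from claim_ids where claimId=? and name=? and txid=?", (claim_id, name, txid))
    conn.execute("insert into claim_ids values (?, ?, ?)", (claim_id, name, txid))
    return claim_id

def get_claimid_for_tx(name, txid):
    rows = conn.execute("select claimId from claim_ids where name=? and txid=?", (name, txid)).fetchall()
    return rows[0][0] if rows else None

update_claimid("abc123", "example-name", "f" * 64)
assert get_claimid_for_tx("example-name", "f" * 64) == "abc123"
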
@@ -571,6 +652,9 @@ class LBRYWallet(object):
def get_name_claims(self):
return defer.fail(NotImplementedError())

def _get_claims_for_name(self, name):
return defer.fail(NotImplementedError())

def _check_first_run(self):
return defer.fail(NotImplementedError())

@@ -586,7 +670,10 @@ class LBRYWallet(object):
def _send_abandon(self, txid, address, amount):
return defer.fail(NotImplementedError())

def _update_name(self, txid, value, amount):
def _send_name_claim_update(self, name, claim_id, txid, value, amount):
return defer.fail(NotImplementedError())

def _support_claim(self, name, claim_id, amount):
return defer.fail(NotImplementedError())

def _do_send_many(self, payments_to_send):

@@ -721,9 +808,15 @@ class LBRYcrdWallet(LBRYWallet):
def _send_abandon(self, txid, address, amount):
return threads.deferToThread(self._send_abandon_rpc, txid, address, amount)

def _update_name(self, txid, value, amount):
def _send_name_claim_update(self, name, claim_id, txid, value, amount):
return threads.deferToThread(self._update_name_rpc, txid, value, amount)

def _support_claim(self, name, claim_id, amount):
return threads.deferToThread(self._support_claim_rpc, name, claim_id, amount)

def _get_claims_for_name(self, name):
return threads.deferToThread(self._get_claims_for_name_rpc, name)

def get_claims_from_tx(self, txid):
return threads.deferToThread(self._get_claims_from_tx_rpc, txid)

@@ -858,6 +951,11 @@ class LBRYcrdWallet(LBRYWallet):
rpc_conn = self._get_rpc_conn()
return rpc_conn.getclaimsfortx(txid)

@_catch_connection_error
def _get_claims_for_name_rpc(self, name):
rpc_conn = self._get_rpc_conn()
return rpc_conn.getclaimsforname(name)

@_catch_connection_error
def _get_nametrie_rpc(self):
rpc_conn = self._get_rpc_conn()

@@ -878,6 +976,7 @@ class LBRYcrdWallet(LBRYWallet):
rpc_conn = self._get_rpc_conn()
return rpc_conn.getvalueforname(name)

@_catch_connection_error
def _update_name_rpc(self, txid, value, amount):
rpc_conn = self._get_rpc_conn()
return rpc_conn.updateclaim(txid, value, amount)

@@ -893,6 +992,11 @@ class LBRYcrdWallet(LBRYWallet):
elif 'message' in e.error:
raise ValueError(e.error['message'])

@_catch_connection_error
def _support_claim_rpc(self, name, claim_id, amount):
rpc_conn = self._get_rpc_conn()
return rpc_conn.supportclaim(name, claim_id, amount)

@_catch_connection_error
def _get_num_addresses_rpc(self):
rpc_conn = self._get_rpc_conn()

@@ -1106,6 +1210,25 @@ class LBRYumWallet(LBRYWallet):
d.addCallback(self._broadcast_transaction)
return d

def _get_claims_for_name(self, name):
cmd = known_commands['getclaimsforname']
func = getattr(self.cmd_runner, cmd.name)
return threads.deferToThread(func, name)

def _send_name_claim_update(self, name, claim_id, txid, value, amount):
def send_claim_update(address):
decoded_claim_id = claim_id.decode('hex')[::-1]
metadata = Metadata(value).as_json()
log.info("updateclaim %s %s %f %s %s '%s'", txid, address, amount, name, decoded_claim_id.encode('hex'), json.dumps(metadata))
cmd = known_commands['updateclaim']
func = getattr(self.cmd_runner, cmd.name)
return threads.deferToThread(func, txid, address, amount, name, decoded_claim_id, metadata)

d = self.get_new_address()
d.addCallback(send_claim_update)
d.addCallback(self._broadcast_transaction)
return d

def _get_decoded_tx(self, raw_tx):
tx = Transaction(raw_tx)
decoded_tx = {}

@@ -1117,18 +1240,33 @@ class LBRYumWallet(LBRYWallet):
return decoded_tx

def _send_abandon(self, txid, address, amount):
log.info("Abandon " + str(txid) + " " + str(address) + " " + str(amount))
log.info("Abandon %s %s %f" % (txid, address, amount))
cmd = known_commands['abandonclaim']
func = getattr(self.cmd_runner, cmd.name)
d = threads.deferToThread(func, txid, address, amount)
d.addCallback(self._broadcast_transaction)
return d

def _support_claim(self, name, claim_id, amount):
def _send_support(d, a, n, c):
cmd = known_commands['supportclaim']
func = getattr(self.cmd_runner, cmd.name)
d = threads.deferToThread(func, d, a, n, c)
return d
d = self.get_new_address()
d.addCallback(lambda address: _send_support(address, amount, name, claim_id))
d.addCallback(self._broadcast_transaction)
return d

def _broadcast_transaction(self, raw_tx):
log.info("Broadcast: " + str(raw_tx))
def _log_tx(r):
log.info("Broadcast tx: %s", r)
return r
cmd = known_commands['broadcast']
func = getattr(self.cmd_runner, cmd.name)
d = threads.deferToThread(func, raw_tx)
d.addCallback(_log_tx)
d.addCallback(lambda r: r if len(r) == 64 else defer.fail(Exception("Transaction rejected")))
d.addCallback(self._save_wallet)
return d

@@ -1,24 +1,111 @@
import base64
import json
import logging
import logging.handlers
import sys
import traceback
import lbrynet
from lbrynet import conf
from requests_futures.sessions import FuturesSession

session = FuturesSession()

def bg_cb(sess, resp):
""" Don't do anything with the response """
pass

class HTTPSHandler(logging.Handler):
def __init__(self, url, fqdn=False, localname=None, facility=None):
logging.Handler.__init__(self)
self.url = url
self.fqdn = fqdn
self.localname = localname
self.facility = facility

def get_full_message(self, record):
if record.exc_info:
return '\n'.join(traceback.format_exception(*record.exc_info))
else:
return record.getMessage()

def emit(self, record):
try:
payload = self.format(record)
session.post(self.url, data=payload, background_callback=bg_cb)
except (KeyboardInterrupt, SystemExit):
raise
except:
self.handleError(record)

DEFAULT_FORMAT = "%(asctime)s %(levelname)-8s %(name)s:%(lineno)d: %(message)s"
DEFAULT_FORMATTER = logging.Formatter(DEFAULT_FORMAT)
LOGGLY_URL = "https://logs-01.loggly.com/inputs/{token}/tag/{tag}"

def configureConsole(log=None, level=logging.INFO):
def remove_handlers(log, handler_name):
for handler in log.handlers:
if handler.name == handler_name:
log.removeHandler(handler)

def _log_decorator(fn):
def helper(*args, **kwargs):
log = kwargs.pop('log', logging.getLogger())
level = kwargs.pop('level', logging.INFO)
handler = fn(*args, **kwargs)
if handler.name:
remove_handlers(log, handler.name)
log.addHandler(handler)
log.setLevel(level)
return helper

def disable_noisy_loggers():
logging.getLogger('requests').setLevel(logging.WARNING)

@_log_decorator
def configure_console(**kwargs):
"""Convenience function to configure a logger that outputs to stdout"""
log = log or logging.getLogger()
handler = logging.StreamHandler(sys.stdout)
handler.setFormatter(DEFAULT_FORMATTER)
log.addHandler(handler)
log.setLevel(level=level)
handler.name = 'console'
return handler

def configureFileHandler(file_name, log=None, level=logging.INFO):
log = log or logging.getLogger()
@_log_decorator
def configure_file_handler(file_name, **kwargs):
handler = logging.handlers.RotatingFileHandler(file_name, maxBytes=2097152, backupCount=5)
handler.setFormatter(DEFAULT_FORMATTER)
log.addHandler(handler)
log.setLevel(level=level)
handler.name = 'file'
return handler

def get_loggly_url(token=None, version=None):
token = token or base64.b64decode(conf.LOGGLY_TOKEN)
version = version or lbrynet.__version__
return LOGGLY_URL.format(token=token, tag='lbrynet-' + version)

@_log_decorator
def configure_loggly_handler(url=None, **kwargs):
url = url or get_loggly_url()
json_format = {
"loggerName": "%(name)s",
"asciTime": "%(asctime)s",
"fileName": "%(filename)s",
"functionName": "%(funcName)s",
"levelNo": "%(levelno)s",
"lineNo": "%(lineno)d",
"levelName": "%(levelname)s",
"message": "%(message)s",
}
json_format.update(kwargs)
formatter = logging.Formatter(json.dumps(json_format))
handler = HTTPSHandler(url)
handler.setFormatter(formatter)
handler.name = 'loggly'
return handler

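For reference, a minimal, self-contained sketch (not part of the diff) of the handler-registration pattern the new _log_decorator introduces: the decorated function builds and names a handler, and the wrapper swaps out any previously installed handler with that name before adding it. log_decorator and configure_console here are illustrative stand-ins; the list() copy avoids mutating log.handlers while iterating over it.

import logging
import sys

def log_decorator(fn):
    def helper(*args, **kwargs):
        log = kwargs.pop('log', logging.getLogger())
        level = kwargs.pop('level', logging.INFO)
        handler = fn(*args, **kwargs)
        if handler.name:
            # replace any handler previously registered under the same name
            for existing in list(log.handlers):
                if existing.name == handler.name:
                    log.removeHandler(existing)
        log.addHandler(handler)
        log.setLevel(level)
    return helper

@log_decorator
def configure_console(**kwargs):
    handler = logging.StreamHandler(sys.stdout)
    handler.name = 'console'
    return handler

configure_console(level=logging.DEBUG)
logging.getLogger(__name__).debug("console handler installed")
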
@@ -1,5 +1,5 @@
import binascii
from twisted.internet import defer, task, reactor
from twisted.internet import defer, reactor
import collections

@@ -78,4 +78,4 @@ class DHTHashSupplier(object):
self.hash_reannounce_time = 60 * 60 # 1 hour

def hashes_to_announce(self):
pass
pass

@@ -99,8 +99,8 @@ class ServerRequestHandler(object):
d.addCallback(lambda _: self.blob_sender.send_blob_if_requested(self))
d.addCallbacks(lambda _: self.finished_response(), self.request_failure_handler)
else:
log.info("Request buff not a valid json message")
log.info("Request buff: %s", str(self.request_buff))
log.debug("Request buff not a valid json message")
log.debug("Request buff: %s", str(self.request_buff))
else:
log.warning("The client sent data when we were uploading a file. This should not happen")

@@ -125,7 +125,7 @@ class ServerRequestHandler(object):

def send_response(self, msg):
m = json.dumps(msg)
log.info("Sending a response of length %s", str(len(m)))
log.debug("Sending a response of length %s", str(len(m)))
log.debug("Response: %s", str(m))
self.response_buff = self.response_buff + m
self._produce_more()

@@ -171,4 +171,4 @@ class ServerRequestHandler(object):
msg = json.loads(request_buff)
return msg
except ValueError:
return None
return None

@@ -8,10 +8,7 @@
# may be created by processing this file with epydoc: http://epydoc.sf.net

import UserDict
#import sqlite3
import cPickle as pickle
import time
import os
import constants

@@ -7,7 +7,7 @@
# The docstrings in this module contain epytext markup; API documentation
# may be created by processing this file with epydoc: http://epydoc.sf.net

import hashlib, random, struct, time, math, binascii
import hashlib, random, struct, time, binascii
import argparse
from twisted.internet import defer, error
import constants

@@ -1017,4 +1017,4 @@ def main():
twisted.internet.reactor.run()

if __name__ == '__main__':
main()
main()

@@ -20,7 +20,7 @@

import os, sys, time, signal, hashlib, random
import sys, hashlib, random
import twisted.internet.reactor
from lbrynet.dht.node import Node
#from entangled.kademlia.datastore import SQLiteDataStore

@@ -106,7 +106,7 @@ def stop():

if __name__ == '__main__':

import sys, os
import sys
if len(sys.argv) < 2:
print 'Usage:\n%s UDP_PORT [KNOWN_NODE_IP KNOWN_NODE_PORT]' % sys.argv[0]
print 'or:\n%s UDP_PORT [FILE_WITH_KNOWN_NODES]' % sys.argv[0]

@@ -48,7 +48,7 @@ class DBLBRYFileMetadataManager(object):
return self._add_blobs_to_stream(stream_hash, blobs, ignore_duplicate_error=True)

def get_blobs_for_stream(self, stream_hash, start_blob=None, end_blob=None, count=None, reverse=False):
log.info("Getting blobs for a stream. Count is %s", str(count))
log.debug("Getting blobs for a stream. Count is %s", str(count))

def get_positions_of_start_and_end():
if start_blob is not None:

@@ -11,7 +11,6 @@ from lbrynet import conf
from lbrynet.lbryfile.StreamDescriptor import get_sd_info
from lbrynet.core.cryptoutils import get_lbry_hash_obj
from twisted.protocols.basic import FileSender
from lbrynet.lbryfilemanager.LBRYFileDownloader import ManagedLBRYFileDownloader

log = logging.getLogger(__name__)

@@ -155,4 +154,4 @@ def create_lbry_file(session, lbry_file_manager, file_name, file_handle, key=Non

d = lbry_file_creator.setup()
d.addCallback(lambda _: start_stream())
return d
return d

@@ -27,6 +27,7 @@ class ManagedLBRYFileDownloader(LBRYFileSaver):
self.sd_hash = None
self.txid = None
self.uri = None
self.claim_id = None
self.rowid = rowid
self.lbry_file_manager = lbry_file_manager
self.saving_status = False

@@ -43,10 +44,16 @@ class ManagedLBRYFileDownloader(LBRYFileSaver):

return d

def _save_claim_id(claim_id):
self.claim_id = claim_id
return defer.succeed(None)

def _save_claim(name, txid):
self.uri = name
self.txid = txid
return defer.succeed(None)
d = self.wallet.get_claimid(name, txid)
d.addCallback(_save_claim_id)
return d

d.addCallback(_save_sd_hash)
d.addCallback(lambda r: _save_claim(r[0], r[1]) if r else None)

@@ -80,13 +80,16 @@ class LBRYFileManager(object):
d.addCallback(lambda downloader: downloader.restore())
return d

def log_error(err):
def log_error(err, rowid, stream_hash, options):
log.error("An error occurred while starting a lbry file: %s", err.getErrorMessage())
log.error(rowid)
log.error(stream_hash)
log.error(options)

def start_lbry_files(lbry_files_and_options):
for rowid, stream_hash, options in lbry_files_and_options:
d = set_options_and_restore(rowid, stream_hash, options)
d.addErrback(log_error)
d.addErrback(lambda err: log_error(err, rowid, stream_hash, options))
return True

d = self._get_all_lbry_files()

@@ -1,18 +1,12 @@
import json
import logging
from time import sleep

from bitcoinrpc.authproxy import AuthServiceProxy
from twisted.internet.task import LoopingCall
from zope.interface import implements
#from lbrynet.core.StreamDescriptor import PlainStreamDescriptorWriter, BlobStreamDescriptorWriter
from lbrynet.core.PaymentRateManager import PaymentRateManager
from lbrynet.lbryfilemanager.LBRYFileCreator import create_lbry_file
from lbrynet.lbryfilemanager.LBRYFileDownloader import ManagedLBRYFileDownloader
# from lbrynet.lbryfile.StreamDescriptor import get_sd_info
from lbrynet.lbryfile.StreamDescriptor import publish_sd_blob, create_plain_sd
from lbrynet.lbrynet_console.interfaces import ICommandHandler, ICommandHandlerFactory
from lbrynet.core.StreamDescriptor import download_sd_blob#, BlobStreamDescriptorReader
from lbrynet.core.StreamDescriptor import download_sd_blob
from lbrynet.core.Error import UnknownNameError, InvalidBlobHashError, InsufficientFundsError
from lbrynet.core.Error import InvalidStreamInfoError
from lbrynet.core.utils import is_valid_blobhash

@@ -1,6 +1,4 @@
import binascii
import distutils.version
import locale
import logging.handlers
import mimetypes
import os

@@ -14,7 +12,6 @@ import sys
import base58
import requests
import simplejson as json
import pkg_resources

from urllib2 import urlopen
from appdirs import user_data_dir

@@ -25,7 +22,7 @@ from twisted.internet import defer, threads, error, reactor
from twisted.internet.task import LoopingCall
from txjsonrpc import jsonrpclib
from txjsonrpc.web import jsonrpc
from txjsonrpc.web.jsonrpc import Handler, Proxy
from txjsonrpc.web.jsonrpc import Handler

from lbrynet import __version__ as lbrynet_version
from lbryum.version import LBRYUM_VERSION as lbryum_version

@@ -42,14 +39,18 @@ from lbrynet.lbrynet_daemon.LBRYDownloader import GetStream
from lbrynet.lbrynet_daemon.LBRYPublisher import Publisher
from lbrynet.lbrynet_daemon.LBRYExchangeRateManager import ExchangeRateManager
from lbrynet.lbrynet_daemon.Lighthouse import LighthouseClient
from lbrynet.core.LBRYMetadata import Metadata
from lbrynet.core import log_support
from lbrynet.core import utils
from lbrynet.core.LBRYMetadata import verify_name_characters
from lbrynet.core.utils import generate_id
from lbrynet.lbrynet_console.LBRYSettings import LBRYSettings
from lbrynet.conf import MIN_BLOB_DATA_PAYMENT_RATE, DEFAULT_MAX_SEARCH_RESULTS, KNOWN_DHT_NODES, DEFAULT_MAX_KEY_FEE, \
DEFAULT_WALLET, DEFAULT_SEARCH_TIMEOUT, DEFAULT_CACHE_TIME, DEFAULT_UI_BRANCH, LOG_POST_URL, LOG_FILE_NAME, SOURCE_TYPES
from lbrynet.conf import MIN_BLOB_DATA_PAYMENT_RATE, DEFAULT_MAX_SEARCH_RESULTS, \
KNOWN_DHT_NODES, DEFAULT_MAX_KEY_FEE, DEFAULT_WALLET, \
DEFAULT_SEARCH_TIMEOUT, DEFAULT_CACHE_TIME, DEFAULT_UI_BRANCH, \
LOG_POST_URL, LOG_FILE_NAME
from lbrynet.conf import DEFAULT_SD_DOWNLOAD_TIMEOUT
from lbrynet.conf import DEFAULT_TIMEOUT, WALLET_TYPES
from lbrynet.conf import DEFAULT_TIMEOUT
from lbrynet.core.StreamDescriptor import StreamDescriptorIdentifier, download_sd_blob, BlobStreamDescriptorReader
from lbrynet.core.Session import LBRYSession
from lbrynet.core.PTCWallet import PTCWallet

@@ -133,6 +134,11 @@ OK_CODE = 200
REMOTE_SERVER = "www.google.com"

class Parameters(object):
def __init__(self, **kwargs):
self.__dict__.update(kwargs)

class LBRYDaemon(jsonrpc.JSONRPC):
"""
LBRYnet daemon, a jsonrpc interface to lbry functions

@@ -320,14 +326,7 @@ class LBRYDaemon(jsonrpc.JSONRPC):
else:
self.wallet_dir = os.path.join(get_path(FOLDERID.RoamingAppData, UserHandle.current), "lbryum")
elif sys.platform == "darwin":
# use the path from the bundle if its available.
try:
import Foundation
bundle = Foundation.NSBundle.mainBundle()
self.lbrycrdd_path = bundle.pathForResource_ofType_('lbrycrdd', None)
except Exception:
log.exception('Failed to get path from bundle, falling back to default')
self.lbrycrdd_path = "./lbrycrdd"
self.lbrycrdd_path = get_darwin_lbrycrdd_path()
if self.wallet_type == "lbrycrd":
self.wallet_dir = user_data_dir("lbrycrd")
else:

@@ -384,7 +383,9 @@ class LBRYDaemon(jsonrpc.JSONRPC):
log.info("Done writing lbrycrd.conf")

def _responseFailed(self, err, call):
call.cancel()
log.debug(err.getTraceback())
if call.active():
call.cancel()

def render(self, request):
request.content.seek(0, 0)

@@ -427,10 +428,11 @@ class LBRYDaemon(jsonrpc.JSONRPC):
d = defer.maybeDeferred(function, *args)

# cancel the response if the connection is broken
request.notifyFinish().addErrback(self._responseFailed, d)

notify_finish = request.notifyFinish()
notify_finish.addErrback(self._responseFailed, d)
d.addErrback(self._ebRender, id)
d.addCallback(self._cbRender, request, id, version)
d.addErrback(notify_finish.errback)
return server.NOT_DONE_YET

def _cbRender(self, result, request, id, version):

@@ -485,8 +487,6 @@ class LBRYDaemon(jsonrpc.JSONRPC):
log.info("Scheduling scripts")
reactor.callLater(3, self._run_scripts)

# self.lbrynet_connection_checker.start(3600)

if self.first_run:
d = self._upload_log(log_type="first_run")
elif self.upload_log:

@@ -494,11 +494,6 @@ class LBRYDaemon(jsonrpc.JSONRPC):
else:
d = defer.succeed(None)

# if float(self.session.wallet.wallet_balance) == 0.0:
# d.addCallback(lambda _: self._check_first_run())
# d.addCallback(self._show_first_run_result)

# d.addCallback(lambda _: _wait_for_credits() if self.requested_first_run_credits else _announce())
d.addCallback(lambda _: _announce())
return d

@@ -957,6 +952,7 @@ class LBRYDaemon(jsonrpc.JSONRPC):
d = self.settings.start()
d.addCallback(lambda _: self.settings.get_lbryid())
d.addCallback(self._set_lbryid)
d.addCallback(lambda _: self._modify_loggly_formatter())
return d

def _set_lbryid(self, lbryid):

@@ -972,6 +968,14 @@ class LBRYDaemon(jsonrpc.JSONRPC):
d = self.settings.save_lbryid(self.lbryid)
return d

def _modify_loggly_formatter(self):
session_id = base58.b58encode(generate_id())
log_support.configure_loggly_handler(
lbry_id=base58.b58encode(self.lbryid),
session_id=session_id
)

def _setup_lbry_file_manager(self):
self.startup_status = STARTUP_STAGES[3]
self.lbry_file_metadata_manager = DBLBRYFileMetadataManager(self.db_dir)

@@ -1038,62 +1042,6 @@ class LBRYDaemon(jsonrpc.JSONRPC):

return dl

# def _check_first_run(self):
# def _set_first_run_false():
# log.info("Not first run")
# self.first_run = False
# self.session_settings['requested_first_run_credits'] = True
# f = open(self.daemon_conf, "w")
# f.write(json.dumps(self.session_settings))
# f.close()
# return 0.0
#
# if self.wallet_type == 'lbryum':
# d = self.session.wallet.is_first_run()
# d.addCallback(lambda is_first_run: self._do_first_run() if is_first_run or not self.requested_first_run_credits
# else _set_first_run_false())
# else:
# d = defer.succeed(None)
# d.addCallback(lambda _: _set_first_run_false())
# return d
#
# def _do_first_run(self):
# def send_request(url, data):
# log.info("Requesting first run credits")
# r = requests.post(url, json=data)
# if r.status_code == 200:
# self.requested_first_run_credits = True
# self.session_settings['requested_first_run_credits'] = True
# f = open(self.daemon_conf, "w")
# f.write(json.dumps(self.session_settings))
# f.close()
# return r.json()['credits_sent']
# return 0.0
#
# def log_error(err):
# log.warning("unable to request free credits. %s", err.getErrorMessage())
# return 0.0
#
# def request_credits(address):
# url = "http://credreq.lbry.io/requestcredits"
# data = {"address": address}
# d = threads.deferToThread(send_request, url, data)
# d.addErrback(log_error)
# return d
#
# self.first_run = True
# d = self.session.wallet.get_new_address()
# d.addCallback(request_credits)
#
# return d
#
# def _show_first_run_result(self, credits_received):
# if credits_received != 0.0:
# points_string = locale.format_string("%.2f LBC", (round(credits_received, 2),), grouping=True)
# self.startup_message = "Thank you for testing the alpha version of LBRY! You have been given %s for free because we love you. Please hang on for a few minutes for the next block to be mined. When you refresh this page and see your credits you're ready to go!." % points_string
# else:
# self.startup_message = None

def _setup_stream_identifier(self):
file_saver_factory = LBRYFileSaverFactory(self.session.peer_finder, self.session.rate_limiter,
self.session.blob_manager, self.stream_info_manager,

@@ -1132,97 +1080,38 @@ class LBRYDaemon(jsonrpc.JSONRPC):
return r

def _download_name(self, name, timeout=DEFAULT_TIMEOUT, download_directory=None,
file_name=None, stream_info=None, wait_for_write=True):
file_name=None, stream_info=None, wait_for_write=True):
"""
Add a lbry file to the file manager, start the download, and return the new lbry file.
If it already exists in the file manager, return the existing lbry file
"""

if not download_directory:
download_directory = self.download_directory
elif not os.path.isdir(download_directory):
download_directory = self.download_directory

def _remove_from_wait(r):
del self.waiting_on[name]
return r

def _setup_stream(stream_info):
if 'sources' in stream_info.keys():
stream_hash = stream_info['sources']['lbry_sd_hash']
else:
stream_hash = stream_info['stream_hash']

d = self._get_lbry_file_by_sd_hash(stream_hash)
def _add_results(l):
if l:
if os.path.isfile(os.path.join(self.download_directory, l.file_name)):
return defer.succeed((stream_info, l))
return defer.succeed((stream_info, None))
d.addCallback(_add_results)
return d

def _wait_on_lbry_file(f):
if os.path.isfile(os.path.join(self.download_directory, f.file_name)):
written_file = file(os.path.join(self.download_directory, f.file_name))
written_file.seek(0, os.SEEK_END)
written_bytes = written_file.tell()
written_file.close()
else:
written_bytes = False

if not written_bytes:
d = defer.succeed(None)
d.addCallback(lambda _: reactor.callLater(1, _wait_on_lbry_file, f))
return d
else:
return defer.succeed(_disp_file(f))

def _disp_file(f):
file_path = os.path.join(self.download_directory, f.file_name)
log.info("Already downloaded: " + str(f.sd_hash) + " --> " + file_path)
return f

def _get_stream(stream_info):
def _wait_for_write():
try:
if os.path.isfile(os.path.join(self.download_directory, self.streams[name].downloader.file_name)):
written_file = file(os.path.join(self.download_directory, self.streams[name].downloader.file_name))
written_file.seek(0, os.SEEK_END)
written_bytes = written_file.tell()
written_file.close()
else:
written_bytes = False
except:
written_bytes = False

if not written_bytes:
d = defer.succeed(None)
d.addCallback(lambda _: reactor.callLater(1, _wait_for_write))
return d
else:
return defer.succeed(None)

self.streams[name] = GetStream(self.sd_identifier, self.session, self.session.wallet,
self.lbry_file_manager, self.exchange_rate_manager,
max_key_fee=self.max_key_fee, data_rate=self.data_rate, timeout=timeout,
download_directory=download_directory, file_name=file_name)
d = self.streams[name].start(stream_info, name)
if wait_for_write:
d.addCallback(lambda _: _wait_for_write())
d.addCallback(lambda _: self.streams[name].downloader)

return d
helper = _DownloadNameHelper(
self, name, timeout, download_directory, file_name, wait_for_write)

if not stream_info:
self.waiting_on[name] = True
d = self._resolve_name(name)
else:
d = defer.succeed(stream_info)
d.addCallback(_setup_stream)
d.addCallback(lambda (stream_info, lbry_file): _get_stream(stream_info) if not lbry_file else _wait_on_lbry_file(lbry_file))
d.addCallback(helper._setup_stream)
d.addCallback(helper.wait_or_get_stream)
if not stream_info:
d.addCallback(_remove_from_wait)
d.addCallback(helper._remove_from_wait)
return d

def add_stream(self, name, timeout, download_directory, file_name, stream_info):
"""Makes, adds and starts a stream"""
self.streams[name] = GetStream(self.sd_identifier,
self.session,
self.session.wallet,
self.lbry_file_manager,
self.exchange_rate_manager,
max_key_fee=self.max_key_fee,
data_rate=self.data_rate,
timeout=timeout,
download_directory=download_directory,
file_name=file_name)
d = self.streams[name].start(stream_info, name)
return d

def _get_long_count_timestamp(self):

@@ -1235,44 +1124,16 @@ class LBRYDaemon(jsonrpc.JSONRPC):
return defer.succeed(True)

def _resolve_name(self, name, force_refresh=False):
try:
verify_name_characters(name)
except AssertionError:
log.error("Bad name")
return defer.fail(InvalidNameError("Bad name"))
"""Resolves a name. Checks the cache first before going out to the blockchain.

def _cache_stream_info(stream_info):
def _add_txid(txid):
self.name_cache[name]['txid'] = txid
return defer.succeed(None)

self.name_cache[name] = {'claim_metadata': stream_info, 'timestamp': self._get_long_count_timestamp()}
d = self.session.wallet.get_txid_for_name(name)
d.addCallback(_add_txid)
d.addCallback(lambda _: self._update_claim_cache())
d.addCallback(lambda _: self.name_cache[name]['claim_metadata'])

return d

if not force_refresh:
if name in self.name_cache.keys():
if (self._get_long_count_timestamp() - self.name_cache[name]['timestamp']) < self.cache_time:
log.info("Returning cached stream info for lbry://" + name)
d = defer.succeed(self.name_cache[name]['claim_metadata'])
else:
log.info("Refreshing stream info for lbry://" + name)
d = self.session.wallet.get_stream_info_for_name(name)
d.addCallbacks(_cache_stream_info, lambda _: defer.fail(UnknownNameError))
else:
log.info("Resolving stream info for lbry://" + name)
d = self.session.wallet.get_stream_info_for_name(name)
d.addCallbacks(_cache_stream_info, lambda _: defer.fail(UnknownNameError))
else:
log.info("Resolving stream info for lbry://" + name)
d = self.session.wallet.get_stream_info_for_name(name)
d.addCallbacks(_cache_stream_info, lambda _: defer.fail(UnknownNameError))

return d
Args:
name: the lbry://<name> to resolve
force_refresh: if True, always go out to the blockchain to resolve.
"""
if name.startswith('lbry://'):
raise ValueError('name %s should not start with lbry://')
helper = _ResolveNameHelper(self, name, force_refresh)
return helper.get_deferred()

def _delete_lbry_file(self, lbry_file, delete_file=True):
d = self.lbry_file_manager.delete_lbry_file(lbry_file)

@@ -1298,11 +1159,14 @@ class LBRYDaemon(jsonrpc.JSONRPC):

def _get_est_cost(self, name):
def _check_est(d, name):
if isinstance(d.result, float):
log.info("Cost est for lbry://" + name + ": " + str(d.result) + "LBC")
else:
log.info("Timeout estimating cost for lbry://" + name + ", using key fee")
d.cancel()
try:
if isinstance(d.result, float):
log.info("Cost est for lbry://" + name + ": " + str(d.result) + "LBC")
return defer.succeed(None)
except AttributeError:
pass
log.info("Timeout estimating cost for lbry://" + name + ", using key fee")
d.cancel()
return defer.succeed(None)

def _add_key_fee(data_cost):

@@ -1402,7 +1266,7 @@ class LBRYDaemon(jsonrpc.JSONRPC):
'stream_name': f.stream_name,
'suggested_file_name': f.suggested_file_name,
'upload_allowed': f.upload_allowed, 'sd_hash': f.sd_hash,
'lbry_uri': f.uri, 'txid': f.txid,
'lbry_uri': f.uri, 'txid': f.txid, 'claim_id': f.claim_id,
'total_bytes': size,
'written_bytes': written_bytes, 'code': status[0],
'message': message})

@@ -1414,7 +1278,7 @@ class LBRYDaemon(jsonrpc.JSONRPC):
'points_paid': f.points_paid, 'stopped': f.stopped, 'stream_hash': f.stream_hash,
'stream_name': f.stream_name, 'suggested_file_name': f.suggested_file_name,
'upload_allowed': f.upload_allowed, 'sd_hash': f.sd_hash, 'total_bytes': size,
'written_bytes': written_bytes, 'lbry_uri': f.uri, 'txid': f.txid,
'written_bytes': written_bytes, 'lbry_uri': f.uri, 'txid': f.txid, 'claim_id': f.claim_id,
'code': status[0], 'message': status[1]})

return d

@@ -1445,7 +1309,7 @@ class LBRYDaemon(jsonrpc.JSONRPC):
d = self._get_lbry_file_by_sd_hash(val)
elif search_by == "file_name":
d = self._get_lbry_file_by_file_name(val)
d.addCallback(_log_get_lbry_file)
# d.addCallback(_log_get_lbry_file)
if return_json:
d.addCallback(_get_json_for_return)
return d

@@ -1811,74 +1675,67 @@ class LBRYDaemon(jsonrpc.JSONRPC):
"""

def _convert_amount_to_float(r):
r['amount'] = float(r['amount']) / 10**8
return r
if not r:
return False
else:
r['amount'] = float(r['amount']) / 10**8
return r

name = p['name']
d = self.session.wallet.get_claim_info(name)
txid = p.get('txid', None)
d = self.session.wallet.get_claim_info(name, txid)
d.addCallback(_convert_amount_to_float)
d.addCallback(lambda r: self._render_response(r, OK_CODE))
return d

def _process_get_parameters(self, p):
"""Extract info from input parameters and fill in default values for `get` call."""
# TODO: this process can be abstracted s.t. each method
# can spec what parameters it expects and how to set default values
timeout = p.get('timeout', self.download_timeout)
download_directory = p.get('download_directory', self.download_directory)
file_name = p.get('file_name')
stream_info = p.get('stream_info')
sd_hash = get_sd_hash(stream_info)
wait_for_write = p.get('wait_for_write', True)
name = p.get('name')
return Parameters(
timeout=timeout,
download_directory=download_directory,
file_name=file_name,
stream_info=stream_info,
sd_hash=sd_hash,
wait_for_write=wait_for_write,
name=name
)

def jsonrpc_get(self, p):
"""
Download stream from a LBRY uri
"""Download stream from a LBRY uri.

Args:
'name': name to download, string
'download_directory': optional, path to directory where file will be saved, string
'file_name': optional, a user specified name for the downloaded file
'stream_info': optional, specified stream info overrides name
'timeout': optional
'wait_for_write': optional, defaults to True
Returns:
'stream_hash': hex string
'path': path of download
"""

if 'timeout' not in p.keys():
timeout = self.download_timeout
else:
timeout = p['timeout']

if 'download_directory' not in p.keys():
download_directory = self.download_directory
else:
download_directory = p['download_directory']

if 'file_name' in p.keys():
file_name = p['file_name']
else:
file_name = None

if 'stream_info' in p.keys():
stream_info = p['stream_info']
if 'sources' in stream_info.keys():
sd_hash = stream_info['sources']['lbry_sd_hash']
else:
sd_hash = stream_info['stream_hash']
else:
stream_info = None

if 'wait_for_write' in p.keys():
wait_for_write = p['wait_for_write']
else:
wait_for_write = True

if 'name' in p.keys():
name = p['name']
if p['name'] not in self.waiting_on.keys():
d = self._download_name(name=name, timeout=timeout, download_directory=download_directory,
stream_info=stream_info, file_name=file_name, wait_for_write=wait_for_write)
d.addCallback(lambda l: {'stream_hash': sd_hash,
'path': os.path.join(self.download_directory, l.file_name)}
if stream_info else
{'stream_hash': l.sd_hash,
'path': os.path.join(self.download_directory, l.file_name)})
d.addCallback(lambda message: self._render_response(message, OK_CODE))
else:
d = server.failure
else:
d = server.failure

params = self._process_get_parameters(p)
if not params.name:
return server.failure
if params.name in self.waiting_on:
return server.failure
d = self._download_name(name=params.name,
timeout=params.timeout,
download_directory=params.download_directory,
stream_info=params.stream_info,
file_name=params.file_name,
wait_for_write=params.wait_for_write)
d.addCallback(get_output_callback(params))
d.addCallback(lambda message: self._render_response(message, OK_CODE))
return d

def jsonrpc_stop_lbry_file(self, p):

@@ -1955,7 +1812,7 @@ class LBRYDaemon(jsonrpc.JSONRPC):
List of search results
"""

# TODO: change this function to "search", and use cached stream size info from the search server
# TODO: change this function to "search"

if 'search' in p.keys():
search = p['search']

@@ -2024,26 +1881,31 @@ class LBRYDaemon(jsonrpc.JSONRPC):
Claim txid
"""

def _set_address(address, currency, m):
log.info("Generated new address for key fee: " + str(address))
m['fee'][currency]['address'] = address
return m

name = p['name']

log.info("Publish: ")
log.info(p)

try:
verify_name_characters(name)
except:
except AssertionError:
log.error("Bad name")
return defer.fail(InvalidNameError("Bad name"))

bid = p['bid']
file_path = p['file_path']
metadata = p['metadata']

def _set_address(address, currency):
log.info("Generated new address for key fee: " + str(address))
metadata['fee'][currency]['address'] = address
return defer.succeed(None)

def _delete_data(lbry_file):
txid = lbry_file.txid
d = self._delete_lbry_file(lbry_file, delete_file=False)
d.addCallback(lambda _: txid)
return d
try:
metadata = Metadata(p['metadata'])
make_lbry_file = False
except AssertionError:
make_lbry_file = True
metadata = p['metadata']
file_path = p['file_path']

if not self.pending_claim_checker.running:
self.pending_claim_checker.start(30)

@@ -2057,15 +1919,16 @@ class LBRYDaemon(jsonrpc.JSONRPC):
for c in metadata['fee']:
if 'address' not in metadata['fee'][c]:
d.addCallback(lambda _: self.session.wallet.get_new_address())
d.addCallback(lambda addr: _set_address(addr, c))

pub = Publisher(self.session, self.lbry_file_manager, self.session.wallet)
d.addCallback(lambda _: self._get_lbry_file_by_uri(name))
d.addCallbacks(lambda l: None if not l else _delete_data(l), lambda _: None)
d.addCallback(lambda r: pub.start(name, file_path, bid, metadata, r))
d.addCallback(lambda addr: _set_address(addr, c, metadata))
else:
d.addCallback(lambda _: metadata)
if make_lbry_file:
pub = Publisher(self.session, self.lbry_file_manager, self.session.wallet)
d.addCallback(lambda meta: pub.start(name, file_path, bid, meta))
else:
d.addCallback(lambda meta: self.session.wallet.claim_name(name, bid, meta))
d.addCallback(lambda txid: self._add_to_pending_claims(name, txid))
d.addCallback(lambda r: self._render_response(r, OK_CODE))
d.addErrback(lambda err: self._render_response(err.getTraceback(), BAD_REQUEST))

return d

@@ -2095,6 +1958,25 @@ class LBRYDaemon(jsonrpc.JSONRPC):

return d

def jsonrpc_support_claim(self, p):
"""
Support a name claim

Args:
'name': name
'claim_id': claim id of claim to support
'amount': amount to support by
Return:
txid
"""

name = p['name']
claim_id = p['claim_id']
amount = p['amount']
d = self.session.wallet.support_claim(name, claim_id, amount)
d.addCallback(lambda r: self._render_response(r, OK_CODE))
return d

def jsonrpc_get_name_claims(self):
"""
Get my name claims

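For reference, a minimal sketch (not part of the diff) of what a client-side request body for the new support_claim RPC method might look like; the parameter layout and the name, claim_id, and amount values are assumptions for illustration, not taken from the change.

import json

payload = {
    "method": "support_claim",
    "params": [{"name": "example-name", "claim_id": "abc123", "amount": 1.0}],
    "id": 1,
}
print(json.dumps(payload))
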
@@ -2118,6 +2000,21 @@ class LBRYDaemon(jsonrpc.JSONRPC):

return d

def jsonrpc_get_claims_for_name(self, p):
"""
Get claims for a name

Args:
'name': name
Returns
list of name claims
"""

name = p['name']
d = self.session.wallet.get_claims_for_name(name)
d.addCallback(lambda r: self._render_response(r, OK_CODE))
return d

def jsonrpc_get_transaction_history(self):
"""
Get transaction history

@@ -2437,7 +2334,7 @@ class LBRYDaemon(jsonrpc.JSONRPC):
d = threads.deferToThread(subprocess.Popen, ['open', '-R', path])
else:
# No easy way to reveal specific files on Linux, so just open the containing directory
d = threads.deferToThread(subprocess.Popen, ['xdg-open', os.dirname(path)])
d = threads.deferToThread(subprocess.Popen, ['xdg-open', os.path.dirname(path)])

d.addCallback(lambda _: self._render_response(True, OK_CODE))
return d

@ -2477,3 +2374,183 @@ def get_version_from_tag(tag):
|
|||
return match.group(1)
|
||||
else:
|
||||
raise Exception('Failed to parse version from tag {}'.format(tag))
|
||||
|
||||
|
||||
def get_sd_hash(stream_info):
|
||||
if not stream_info:
|
||||
return None
|
||||
try:
|
||||
return stream_info['sources']['lbry_sd_hash']
|
||||
except KeyError:
|
||||
return stream_info.get('stream_hash')


def get_output_callback(params):
def callback(l):
return {
'stream_hash': params.sd_hash if params.stream_info else l.sd_hash,
'path': os.path.join(params.download_directory, l.file_name)
}
return callback


def get_darwin_lbrycrdd_path():
# use the path from the bundle if it's available.
default = "./lbrycrdd"
try:
import Foundation
except ImportError:
log.warning('Foundation module not installed, falling back to default lbrycrdd path')
return default
else:
try:
bundle = Foundation.NSBundle.mainBundle()
return bundle.pathForResource_ofType_('lbrycrdd', None)
except Exception:
log.exception('Failed to get path from bundle, falling back to default')
return default


class _DownloadNameHelper(object):
def __init__(self, daemon, name, timeout=DEFAULT_TIMEOUT, download_directory=None,
file_name=None, wait_for_write=True):
self.daemon = daemon
self.name = name
self.timeout = timeout
if not download_directory or not os.path.isdir(download_directory):
self.download_directory = daemon.download_directory
else:
self.download_directory = download_directory
self.file_name = file_name
self.wait_for_write = wait_for_write

def _setup_stream(self, stream_info):
stream_hash = get_sd_hash(stream_info)
d = self.daemon._get_lbry_file_by_sd_hash(stream_hash)
d.addCallback(self._add_results_callback(stream_info))
return d

def _add_results_callback(self, stream_info):
def add_results(l):
if l:
if os.path.isfile(os.path.join(self.download_directory, l.file_name)):
return defer.succeed((stream_info, l))
return defer.succeed((stream_info, None))
return add_results

def wait_or_get_stream(self, args):
stream_info, lbry_file = args
if lbry_file:
return self._wait_on_lbry_file(lbry_file)
else:
return self._get_stream(stream_info)

def _get_stream(self, stream_info):
d = self.daemon.add_stream(
self.name, self.timeout, self.download_directory, self.file_name, stream_info)
if self.wait_for_write:
d.addCallback(lambda _: self._wait_for_write())
d.addCallback(lambda _: self.daemon.streams[self.name].downloader)
return d
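The helper above is meant to be chained onto a resolved name: _setup_stream looks for an existing lbry file matching the stream's sd hash, and wait_or_get_stream either waits on that file or starts a fresh download. A hedged wiring sketch; resolve_stream_info is an assumed placeholder for however the daemon obtains the claim metadata:

helper = _DownloadNameHelper(daemon, 'example-name', timeout=30, wait_for_write=True)
d = resolve_stream_info('example-name')    # assumed: yields the claim's metadata dict
d.addCallback(helper._setup_stream)        # -> (stream_info, existing lbry file or None)
d.addCallback(helper.wait_or_get_stream)   # reuse the existing file or start a new download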

def _wait_for_write(self):
d = defer.succeed(None)
if not self.has_downloader_wrote():
d.addCallback(lambda _: reactor.callLater(1, self._wait_for_write))
return d

def has_downloader_wrote(self):
downloader = self.daemon.streams[self.name].downloader
if not downloader:
return False
return self.get_written_bytes(downloader.file_name)

def _wait_on_lbry_file(self, f):
written_bytes = self.get_written_bytes(f.file_name)
if written_bytes:
return defer.succeed(self._disp_file(f))
d = defer.succeed(None)
d.addCallback(lambda _: reactor.callLater(1, self._wait_on_lbry_file, f))
return d

def get_written_bytes(self, file_name):
"""Returns the number of bytes written to `file_name`.

Returns False if there were issues reading `file_name`.
"""
try:
file_path = os.path.join(self.download_directory, file_name)
if os.path.isfile(file_path):
written_file = file(file_path)
written_file.seek(0, os.SEEK_END)
written_bytes = written_file.tell()
written_file.close()
else:
written_bytes = False
except Exception:
written_bytes = False
return written_bytes
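get_written_bytes opens the partially written file and seeks to the end to measure progress. A leaner sketch of the same contract using os.path.getsize; this is an assumption about equivalent behavior, not part of the commit:

import os

def written_bytes_for(download_directory, file_name):
    # Report the current size of a partially written file, or False when the
    # file does not exist or cannot be inspected (mirroring get_written_bytes).
    file_path = os.path.join(download_directory, file_name)
    try:
        if os.path.isfile(file_path):
            return os.path.getsize(file_path)
        return False
    except OSError:
        return False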

def _disp_file(self, f):
file_path = os.path.join(self.download_directory, f.file_name)
log.info("Already downloaded: %s --> %s", f.sd_hash, file_path)
return f

def _remove_from_wait(self, r):
del self.daemon.waiting_on[self.name]
return r


class _ResolveNameHelper(object):
def __init__(self, daemon, name, force_refresh):
self.daemon = daemon
self.name = name
self.force_refresh = force_refresh

def get_deferred(self):
if self.need_fresh_stream():
log.info("Resolving stream info for lbry://%s", self.name)
d = self.wallet.get_stream_info_for_name(self.name)
d.addCallbacks(self._cache_stream_info, lambda _: defer.fail(UnknownNameError))
else:
log.debug("Returning cached stream info for lbry://%s", self.name)
d = defer.succeed(self.name_data['claim_metadata'])
return d

@property
def name_data(self):
return self.daemon.name_cache[self.name]

@property
def wallet(self):
return self.daemon.session.wallet

def now(self):
return self.daemon._get_long_count_timestamp()

def _add_txid(self, txid):
self.name_data['txid'] = txid
return defer.succeed(None)

def _cache_stream_info(self, stream_info):
self.daemon.name_cache[self.name] = {
'claim_metadata': stream_info,
'timestamp': self.now()
}
d = self.wallet.get_txid_for_name(self.name)
d.addCallback(self._add_txid)
d.addCallback(lambda _: self.daemon._update_claim_cache())
d.addCallback(lambda _: self.name_data['claim_metadata'])
return d

def need_fresh_stream(self):
return self.force_refresh or not self.is_in_cache() or self.is_cached_name_expired()

def is_in_cache(self):
return self.name in self.daemon.name_cache

def is_cached_name_expired(self):
time_in_cache = self.now() - self.name_data['timestamp']
return time_in_cache >= self.daemon.cache_time
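Together, need_fresh_stream, is_in_cache and is_cached_name_expired implement a simple TTL cache over resolved names. The same freshness rule as a compact standalone sketch; the function name, the use of time.time(), and the 3600-second default are illustrative:

import time

def needs_refresh(name_cache, name, cache_time=3600, force_refresh=False):
    # Refresh when forced, when the name has never been cached, or when the
    # cached entry is older than cache_time seconds.
    entry = name_cache.get(name)
    if force_refresh or entry is None:
        return True
    return (time.time() - entry['timestamp']) >= cache_time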

@@ -1,7 +1,7 @@
import sys
import json

from lbrynet.conf import API_CONNECTION_STRING, LOG_FILE_NAME
from lbrynet.conf import API_CONNECTION_STRING
from jsonrpc.proxy import JSONRPCProxy

help_msg = "Usage: lbrynet-cli method json-args\n" \

@@ -5,7 +5,6 @@ import os
import webbrowser
import sys
import socket
import platform
from appdirs import user_data_dir

from twisted.web import server

@@ -14,8 +13,8 @@ from jsonrpc.proxy import JSONRPCProxy

from lbrynet.core import log_support
from lbrynet.lbrynet_daemon.LBRYDaemonServer import LBRYDaemonServer, LBRYDaemonRequest
from lbrynet.conf import API_CONNECTION_STRING, API_INTERFACE, API_ADDRESS, API_PORT, \
DEFAULT_WALLET, UI_ADDRESS, DEFAULT_UI_BRANCH, LOG_FILE_NAME
from lbrynet.conf import API_CONNECTION_STRING, API_INTERFACE, API_PORT, \
UI_ADDRESS, DEFAULT_UI_BRANCH, LOG_FILE_NAME

# TODO: stop it!
if sys.platform != "darwin":

@@ -74,11 +73,11 @@ def start():
parser.set_defaults(branch=False, launchui=True, logtoconsole=False, quiet=False)
args = parser.parse_args()

log_support.configureFileHandler(lbrynet_log)
log_support.disable_noisy_loggers()
log_support.configure_file_handler(lbrynet_log)
log_support.configure_loggly_handler()
if args.logtoconsole:
log_support.configureConsole()
log_support.configure_console()

try:
JSONRPCProxy.from_url(API_CONNECTION_STRING).is_running()

@@ -9,15 +9,14 @@ import tempfile
import time
import cgi

from datetime import datetime
from appdirs import user_data_dir
from twisted.web import server, static, resource
from twisted.internet import defer, interfaces, error, reactor, task, threads
from twisted.internet import defer, interfaces, error, reactor, threads

from zope.interface import implements

from lbrynet.lbrynet_daemon.LBRYDaemon import LBRYDaemon
from lbrynet.conf import API_CONNECTION_STRING, API_ADDRESS, DEFAULT_WALLET, UI_ADDRESS, DEFAULT_UI_BRANCH, LOG_FILE_NAME
from lbrynet.conf import API_ADDRESS, UI_ADDRESS, DEFAULT_UI_BRANCH, LOG_FILE_NAME

# TODO: omg, this code is essentially duplicated in LBRYDaemon

@@ -5,14 +5,13 @@ import sys

from copy import deepcopy
from appdirs import user_data_dir
from datetime import datetime
from twisted.internet import defer
from twisted.internet.task import LoopingCall

from lbrynet.core.Error import InvalidStreamInfoError, InsufficientFundsError, KeyFeeAboveMaxAllowed
from lbrynet.core.Error import InsufficientFundsError, KeyFeeAboveMaxAllowed
from lbrynet.core.PaymentRateManager import PaymentRateManager
from lbrynet.core.StreamDescriptor import download_sd_blob
from lbrynet.core.LBRYMetadata import Metadata, LBRYFeeValidator
from lbrynet.core.LBRYMetadata import LBRYFeeValidator
from lbrynet.lbryfilemanager.LBRYFileDownloader import ManagedLBRYFileDownloaderFactory
from lbrynet.conf import DEFAULT_TIMEOUT, LOG_FILE_NAME

@@ -150,21 +149,19 @@ class GetStream(object):
return self.finished

def _start_download(self, downloader):
def _pay_key_fee():
if self.fee is not None:
fee_lbc = self.exchange_rate_manager.to_lbc(self.fee).amount
reserved_points = self.wallet.reserve_points(self.fee.address, fee_lbc)
if reserved_points is None:
return defer.fail(InsufficientFundsError())
return self.wallet.send_points_to_address(reserved_points, fee_lbc)

return defer.succeed(None)

d = _pay_key_fee()

log.info('Starting download for %s', self.name)
self.downloader = downloader
self.download_path = os.path.join(downloader.download_directory, downloader.file_name)

d = self._pay_key_fee()
d.addCallback(lambda _: log.info("Downloading %s --> %s", self.stream_hash, self.downloader.file_name))
d.addCallback(lambda _: self.downloader.start())

def _pay_key_fee(self):
if self.fee is not None:
fee_lbc = self.exchange_rate_manager.to_lbc(self.fee).amount
reserved_points = self.wallet.reserve_points(self.fee.address, fee_lbc)
if reserved_points is None:
return defer.fail(InsufficientFundsError())
return self.wallet.send_points_to_address(reserved_points, fee_lbc)
return defer.succeed(None)
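The refactor above promotes _pay_key_fee from a nested closure to a method and chains it ahead of downloader.start(), so a failed fee payment short-circuits the chain. The essential ordering, as a hedged two-line sketch (stream stands for a GetStream instance):

d = stream._pay_key_fee()                            # a no-op deferred when there is no key fee
d.addCallback(lambda _: stream.downloader.start())   # the download starts only after the fee settles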

@@ -46,7 +46,7 @@ class Publisher(object):
self.reflector_server, self.reflector_port = reflector_server[0], reflector_server[1]
self.metadata = {}

def start(self, name, file_path, bid, metadata, old_txid):
def start(self, name, file_path, bid, metadata):

def _show_result():
log.info("Published %s --> lbry://%s txid: %s", self.file_name, self.publish_name, self.txid)
@@ -56,7 +56,6 @@ class Publisher(object):
self.file_path = file_path
self.bid_amount = bid
self.metadata = metadata
self.old_txid = old_txid

d = self._check_file_path(self.file_path)
d.addCallback(lambda _: create_lbry_file(self.session, self.lbry_file_manager,
@@ -117,21 +116,12 @@ class Publisher(object):
self.metadata['content-type'] = mimetypes.guess_type(os.path.join(self.lbry_file.download_directory,
self.lbry_file.file_name))[0]
self.metadata['ver'] = CURRENT_METADATA_VERSION
m = Metadata(self.metadata)

if self.old_txid:

d = self.wallet.abandon_name(self.old_txid)
d.addCallback(lambda tx: log.info("Abandoned tx %s" % str(tx)))
d.addCallback(lambda _: self.wallet.claim_name(self.publish_name,
self.bid_amount,
Metadata(self.metadata)))
else:
d = self.wallet.claim_name(self.publish_name,
self.bid_amount,
Metadata(self.metadata))
def set_tx_hash(txid):
self.txid = txid

d = self.wallet.claim_name(self.publish_name, self.bid_amount, m)
d.addCallback(set_tx_hash)
return d
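With old_txid gone, Publisher.start takes only the name, file path, bid and metadata, and the claim always goes through wallet.claim_name with a validated Metadata object. A hedged sketch of invoking it, with the constructor arguments taken from the jsonrpc_publish hunk earlier in this diff and all values illustrative:

pub = Publisher(session, lbry_file_manager, wallet)
d = pub.start('example-name', '/path/to/file.mp4', 1.0, metadata)
d.addCallback(lambda result: log.info("publish finished: %s", result))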
@@ -146,4 +136,4 @@ class Publisher(object):
log.error(error_message)
log.error(message, str(self.file_name), str(self.publish_name), err.getTraceback())

return defer.succeed(error_message)
return defer.fail(Exception("Publish failed"))

@@ -1,7 +1,6 @@
import logging
import random
from txjsonrpc.web.jsonrpc import Proxy
from twisted.internet import defer
from lbrynet.conf import SEARCH_SERVERS

log = logging.getLogger(__name__)

@@ -6,7 +6,6 @@ CLI for sending rpc commands to a DHT node
from twisted.internet import reactor
from txjsonrpc.web.jsonrpc import Proxy
import argparse
import sys


def print_value(value):

@@ -39,4 +38,4 @@ def main():
d = proxy.callRemote(args.rpc_command)
d.addCallbacks(print_value, print_error)
d.addBoth(lambda _: shut_down())
reactor.run()
reactor.run()

@@ -27,7 +27,7 @@ if not os.path.isfile(lbrycrdd_path_conf):
f.write(lbrycrdd_path)
f.close()

from lbrynet.lbrynet_daemon.LBRYDaemonServer import LBRYDaemonServer
from lbrynet.lbrynet_daemon.LBRYDaemonServer import LBRYDaemonServer, LBRYDaemonRequest
from lbrynet.conf import API_PORT, API_INTERFACE, ICON_PATH, APP_NAME
from lbrynet.conf import UI_ADDRESS

@@ -74,16 +74,13 @@ class LBRYDaemonApp(AppKit.NSApplication):
LBRYNotify("LBRY needs an internet connection to start, try again when one is available")
sys.exit(0)

# if not subprocess.check_output("git ls-remote https://github.com/lbryio/lbry-web-ui.git | grep HEAD | cut -f 1",
# shell=True):
# LBRYNotify(
# "You should have been prompted to install xcode command line tools, please do so and then start LBRY")
# sys.exit(0)

lbry = LBRYDaemonServer()
d = lbry.start()
d.addCallback(lambda _: webbrowser.open(UI_ADDRESS))
reactor.listenTCP(API_PORT, server.Site(lbry.root), interface=API_INTERFACE)
lbrynet_server = server.Site(lbry.root)
lbrynet_server.requestFactory = LBRYDaemonRequest
reactor.listenTCP(API_PORT, lbrynet_server, interface=API_INTERFACE)

def openui_(self, sender):
webbrowser.open(UI_ADDRESS)

@@ -70,7 +70,10 @@ fi

# add lbrycrdd as a resource. Following
# http://stackoverflow.com/questions/11370012/can-executables-made-with-py2app-include-other-terminal-scripts-and-run-them
wget https://github.com/lbryio/lbrycrd/releases/download/v0.3-osx/lbrycrdd
# LBRYCRDD_URL="$(curl https://api.github.com/repos/lbryio/lbrycrd/releases/latest | grep 'browser_download_url' | grep osx | cut -d'"' -f4)"
LBRYCRDD_URL="https://github.com/lbryio/lbrycrd/releases/download/v0.3.15/lbrycrd-osx.zip"
wget "${LBRYCRDD_URL}" --output-document lbrycrd-osx.zip
unzip lbrycrd-osx.zip
python setup_app.py py2app --resources lbrycrdd

chmod +x "${DEST}/dist/LBRY.app/Contents/Resources/lbrycrdd"

@@ -44,4 +44,5 @@ trial tests

# Ignoring distutils because: https://github.com/PyCQA/pylint/issues/73
# TODO: as code quality improves, make pylint be more strict
pylint -E --disable=inherit-non-class --disable=no-member --ignored-modules=distutils lbrynet
pylint -E --disable=inherit-non-class --disable=no-member --ignored-modules=distutils \
--enable=unused-import lbrynet

@@ -1,5 +1,5 @@
[Desktop Entry]
Version=0.3.13
Version=0.3.17
Name=LBRY
Comment=The world's first user-owned content marketplace
Icon=lbry

@@ -170,7 +170,7 @@ addfile "$PACKAGING_DIR/lbry-temp-symlink" usr/bin/lbry

# add lbrycrdd and lbrycrd-cli
mkdir -p "$PACKAGING_DIR/bins"
wget http://s3.amazonaws.com/files.lbry.io/bins.zip --output-document "$PACKAGING_DIR/bins.zip"
wget "$(curl https://api.github.com/repos/lbryio/lbrycrd/releases/latest | grep 'browser_download_url' | grep linux | cut -d'"' -f4)" --output-document "$PACKAGING_DIR/bins.zip"
unzip "$PACKAGING_DIR/bins.zip" -d "$PACKAGING_DIR/bins/"
addfile "$PACKAGING_DIR/bins/lbrycrdd" usr/bin/lbrycrdd
addfile "$PACKAGING_DIR/bins/lbrycrd-cli" usr/bin/lbrycrd-cli

@@ -9,7 +9,7 @@ gmpy==1.17
jsonrpc==1.2
jsonrpclib==0.1.7
https://github.com/lbryio/lbryum/tarball/master/#egg=lbryum
leveldb==0.193
loggly-python-handler==1.0.0
miniupnpc==1.9
pbkdf2==1.3
protobuf==3.0.0b3
@@ -17,6 +17,7 @@ pycrypto==2.6.1
python-bitcoinrpc==0.1
qrcode==5.2.2
requests==2.9.1
requests_futures==0.9.7
seccure==0.3.1.3
simplejson==3.8.2
six==1.9.0
@@ -26,4 +27,4 @@ unqlite==0.2.0
wsgiref==0.1.2
zope.interface==4.1.3
base58==0.2.2
googlefinance==0.7
googlefinance==0.7
setup.py
@@ -25,7 +25,7 @@ console_scripts = ['lbrynet-stdin-uploader = lbrynet.lbrynet_console.LBRYStdinUp

requires = ['pycrypto', 'twisted', 'miniupnpc', 'yapsy', 'seccure',
'python-bitcoinrpc==0.1', 'txJSON-RPC', 'requests>=2.4.2', 'unqlite==0.2.0',
'leveldb', 'lbryum', 'jsonrpc', 'simplejson', 'appdirs', 'six==1.9.0', 'base58', 'googlefinance']
'leveldb', 'lbryum', 'jsonrpc', 'simplejson', 'appdirs', 'six==1.9.0', 'base58', 'googlefinance', 'requests_futures']

setup(name='lbrynet',
description='A decentralized media library and marketplace',