forked from LBRYCommunity/lbry-sdk
commit
735e9195b5
28 changed files with 816 additions and 191 deletions
|
@ -1,5 +1,5 @@
|
|||
[bumpversion]
|
||||
current_version = 0.3.22
|
||||
current_version = 0.4.0
|
||||
commit = True
|
||||
tag = True
|
||||
|
||||
|
|
|
@ -1,10 +1,10 @@
|
|||
#### Installing the LBRY app
|
||||
--------------------------
|
||||
|
||||
Installing LBRY is simple. You can get a dmg installer for OS X (Mavericks and up) or a .deb for linux [here](https://lbry.io/get).
|
||||
Installing LBRY is simple. You can get a dmg installer for OS X or a .deb for linux [here](https://github.com/lbryio/lbry/releases/latest).
|
||||
|
||||
##### OS X
|
||||
Just drag and drop LBRY.app into your applications folder (replacing any older versions). When it's running you'll have a LBRY icon in your status bar.
|
||||
Just drag and drop LBRY.app into your applications folder (replacing any older versions). When it's running you'll have a LBRY icon in your status bar and the browser will open to the UI.
|
||||
|
||||
##### Linux
|
||||
Double click the .deb file and follow the prompts. The app can be started by searching "LBRY", and it can be turned off by clicking the red 'x' in the browser interface.
|
||||
|
@ -17,7 +17,7 @@ On both systems you can also open the UI while the app is running by going to lb
|
|||
--------------------------
|
||||
|
||||
##### OS X
|
||||
You can install LBRY command line by running `curl -sL https://rawgit.com/lbryio/lbry-setup/master/lbry_setup_osx.sh | sudo bash` in a terminal. This script will install lbrynet and its dependencies, as well as the app. You can start LBRY by either starting the app or by running `lbrynet-daemon` from a terminal.
|
||||
You can install LBRY command line by running `curl -sL https://raw.githubusercontent.com/lbryio/lbry/master/packaging/osx/install_lbry_source.sh | sudo bash` in a terminal. This script will install lbrynet and its dependencies. You can start LBRY by running `lbrynet-daemon` from a terminal.
|
||||
|
||||
##### Linux
|
||||
On Ubuntu or Mint you can install the prerequisites and lbrynet by running
|
||||
|
|
|
@ -1,2 +1,2 @@
|
|||
__version__ = "0.3.22"
|
||||
__version__ = "0.4.0"
|
||||
version = tuple(__version__.split('.'))
|
|
@ -4,6 +4,8 @@ Some network wide and also application specific parameters
|
|||
import os
|
||||
|
||||
|
||||
IS_DEVELOPMENT_VERSION = True
|
||||
|
||||
MAX_HANDSHAKE_SIZE = 2**16
|
||||
MAX_REQUEST_SIZE = 2**16
|
||||
MAX_BLOB_REQUEST_SIZE = 2**16
|
||||
|
@ -25,9 +27,13 @@ KNOWN_DHT_NODES = [('104.236.42.182', 4000),
|
|||
|
||||
POINTTRADER_SERVER = 'http://ec2-54-187-192-68.us-west-2.compute.amazonaws.com:2424'
|
||||
#POINTTRADER_SERVER = 'http://127.0.0.1:2424'
|
||||
SEARCH_SERVERS = ["http://lighthouse1.lbry.io:50005",
|
||||
"http://lighthouse2.lbry.io:50005",
|
||||
"http://lighthouse3.lbry.io:50005"]
|
||||
|
||||
if IS_DEVELOPMENT_VERSION:
|
||||
SEARCH_SERVERS = ["http://107.170.207.64:50005"]
|
||||
else:
|
||||
SEARCH_SERVERS = ["http://lighthouse1.lbry.io:50005",
|
||||
"http://lighthouse2.lbry.io:50005",
|
||||
"http://lighthouse3.lbry.io:50005"]
|
||||
|
||||
REFLECTOR_SERVERS = [("reflector.lbry.io", 5566)]
|
||||
|
||||
|
|
|
@ -67,6 +67,9 @@ class BlobManager(DHTHashSupplier):
|
|||
def blob_paid_for(self, blob_hash, amount):
|
||||
pass
|
||||
|
||||
def get_all_verified_blobs(self):
|
||||
pass
|
||||
|
||||
|
||||
class DiskBlobManager(BlobManager):
|
||||
"""This class stores blobs on the hard disk"""
|
||||
|
@ -78,7 +81,7 @@ class DiskBlobManager(BlobManager):
|
|||
self.blob_type = BlobFile
|
||||
self.blob_creator_type = BlobFileCreator
|
||||
self.blobs = {}
|
||||
self.blob_hashes_to_delete = {} # {blob_hash: being_deleted (True/False)}
|
||||
self.blob_hashes_to_delete = {} # {blob_hash: being_deleted (True/False)}
|
||||
self._next_manage_call = None
|
||||
|
||||
def setup(self):
|
||||
|
@ -177,6 +180,11 @@ class DiskBlobManager(BlobManager):
|
|||
def check_consistency(self):
|
||||
return self._check_consistency()
|
||||
|
||||
def get_all_verified_blobs(self):
|
||||
d = self._get_all_verified_blob_hashes()
|
||||
d.addCallback(self.completed_blobs)
|
||||
return d
|
||||
|
||||
def _manage(self):
|
||||
from twisted.internet import reactor
|
||||
|
||||
|
@ -462,6 +470,10 @@ class TempBlobManager(BlobManager):
|
|||
blobs = [b.blob_hash for b in self.blobs.itervalues() if b.blob_hash in blobs_to_check and b.is_validated()]
|
||||
return defer.succeed(blobs)
|
||||
|
||||
def get_all_verified_blobs(self):
|
||||
d = self.completed_blobs(self.blobs)
|
||||
return d
|
||||
|
||||
def hashes_to_announce(self):
|
||||
now = time.time()
|
||||
blobs = [blob_hash for blob_hash, announce_time in self.blob_next_announces.iteritems() if announce_time < now]
|
||||
|
|
|
@ -1,135 +0,0 @@
|
|||
import json
|
||||
|
||||
from copy import deepcopy
|
||||
from lbrynet.conf import CURRENCIES
|
||||
from lbrynet.core import utils
|
||||
import logging
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
BITTREX_FEE = 0.0025
|
||||
|
||||
# Metadata version
|
||||
SOURCE_TYPES = ['lbry_sd_hash', 'url', 'btih']
|
||||
NAME_ALLOWED_CHARSET = 'abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0987654321-'
|
||||
BASE_METADATA_FIELDS = ['title', 'description', 'author', 'language', 'license', 'content-type', 'sources']
|
||||
OPTIONAL_METADATA_FIELDS = ['thumbnail', 'preview', 'fee', 'contact', 'pubkey']
|
||||
|
||||
MV001 = "0.0.1"
|
||||
MV002 = "0.0.2"
|
||||
CURRENT_METADATA_VERSION = MV002
|
||||
|
||||
METADATA_REVISIONS = {}
|
||||
METADATA_REVISIONS[MV001] = {'required': BASE_METADATA_FIELDS, 'optional': OPTIONAL_METADATA_FIELDS}
|
||||
METADATA_REVISIONS[MV002] = {'required': ['nsfw', 'ver'], 'optional': ['license_url']}
|
||||
|
||||
# Fee version
|
||||
BASE_FEE_FIELDS = ['amount', 'address']
|
||||
|
||||
FV001 = "0.0.1"
|
||||
CURRENT_FEE_REVISION = FV001
|
||||
|
||||
FEE_REVISIONS = {}
|
||||
FEE_REVISIONS[FV001] = {'required': BASE_FEE_FIELDS, 'optional': []}
|
||||
|
||||
|
||||
def verify_name_characters(name):
|
||||
for c in name:
|
||||
assert c in NAME_ALLOWED_CHARSET, "Invalid character"
|
||||
return True
|
||||
|
||||
|
||||
class LBRYFeeValidator(dict):
|
||||
def __init__(self, fee_dict):
|
||||
dict.__init__(self)
|
||||
assert len(fee_dict) == 1
|
||||
self.fee_version = None
|
||||
self.currency_symbol = None
|
||||
|
||||
fee_to_load = deepcopy(fee_dict)
|
||||
|
||||
for currency in fee_dict:
|
||||
self._verify_fee(currency, fee_to_load)
|
||||
|
||||
self.amount = self._get_amount()
|
||||
self.address = self[self.currency_symbol]['address']
|
||||
|
||||
def _get_amount(self):
|
||||
amt = self[self.currency_symbol]['amount']
|
||||
if isinstance(amt, float):
|
||||
return amt
|
||||
else:
|
||||
try:
|
||||
return float(amt)
|
||||
except TypeError:
|
||||
log.error('Failed to convert %s to float', amt)
|
||||
raise
|
||||
|
||||
def _verify_fee(self, currency, f):
|
||||
# str in case someone made a claim with a wierd fee
|
||||
assert currency in CURRENCIES, "Unsupported currency: %s" % str(currency)
|
||||
self.currency_symbol = currency
|
||||
self.update({currency: {}})
|
||||
for version in FEE_REVISIONS:
|
||||
self._load_revision(version, f)
|
||||
if not f:
|
||||
self.fee_version = version
|
||||
break
|
||||
assert f[self.currency_symbol] == {}, "Unknown fee keys: %s" % json.dumps(f.keys())
|
||||
|
||||
def _load_revision(self, version, f):
|
||||
for k in FEE_REVISIONS[version]['required']:
|
||||
assert k in f[self.currency_symbol], "Missing required fee field: %s" % k
|
||||
self[self.currency_symbol].update({k: f[self.currency_symbol].pop(k)})
|
||||
for k in FEE_REVISIONS[version]['optional']:
|
||||
if k in f[self.currency_symbol]:
|
||||
self[self.currency_symbol].update({k: f[self.currency_symbol].pop(k)})
|
||||
|
||||
|
||||
class Metadata(dict):
|
||||
@classmethod
|
||||
def load_from_hex(cls, metadata):
|
||||
return cls(json.loads(metadata.decode('hex')))
|
||||
|
||||
def __init__(self, metadata):
|
||||
dict.__init__(self)
|
||||
self.meta_version = None
|
||||
metadata_to_load = deepcopy(metadata)
|
||||
|
||||
self._verify_sources(metadata_to_load)
|
||||
self._verify_metadata(metadata_to_load)
|
||||
|
||||
def _load_revision(self, version, metadata):
|
||||
for k in METADATA_REVISIONS[version]['required']:
|
||||
assert k in metadata, "Missing required metadata field: %s" % k
|
||||
self.update({k: metadata.pop(k)})
|
||||
for k in METADATA_REVISIONS[version]['optional']:
|
||||
if k == 'fee':
|
||||
self._load_fee(metadata)
|
||||
elif k in metadata:
|
||||
self.update({k: metadata.pop(k)})
|
||||
|
||||
def _load_fee(self, metadata):
|
||||
if 'fee' in metadata:
|
||||
self['fee'] = LBRYFeeValidator(metadata.pop('fee'))
|
||||
|
||||
def _verify_sources(self, metadata):
|
||||
assert "sources" in metadata, "No sources given"
|
||||
for source in metadata['sources']:
|
||||
assert source in SOURCE_TYPES, "Unknown source type"
|
||||
|
||||
def _verify_metadata(self, metadata):
|
||||
for version in METADATA_REVISIONS:
|
||||
self._load_revision(version, metadata)
|
||||
if not metadata:
|
||||
self.meta_version = version
|
||||
if utils.version_is_greater_than(self.meta_version, "0.0.1"):
|
||||
assert self.meta_version == self['ver'], "version mismatch"
|
||||
break
|
||||
assert metadata == {}, "Unknown metadata keys: %s" % json.dumps(metadata.keys())
|
||||
|
||||
def serialize(self):
|
||||
return json.dumps(self).encode("hex")
|
||||
|
||||
def as_json(self):
|
||||
return json.dumps(self)
|
|
@ -25,7 +25,7 @@ from lbrynet.interfaces import IRequestCreator, IQueryHandlerFactory, IQueryHand
|
|||
from lbrynet.core.client.ClientRequest import ClientRequest
|
||||
from lbrynet.core.Error import UnknownNameError, InvalidStreamInfoError, RequestCanceledError
|
||||
from lbrynet.core.Error import InsufficientFundsError
|
||||
from lbrynet.core.LBRYMetadata import Metadata
|
||||
from lbrynet.metadata.LBRYMetadata import Metadata
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
alert = logging.getLogger("lbryalert." + __name__)
|
||||
|
@ -322,7 +322,7 @@ class LBRYWallet(object):
|
|||
assert k in r, "getvalueforname response missing field %s" % k
|
||||
|
||||
def _log_success(claim_id):
|
||||
log.info("lbry://%s complies with %s, claimid: %s", name, metadata.meta_version, claim_id)
|
||||
log.info("lbry://%s complies with %s, claimid: %s", name, metadata.version, claim_id)
|
||||
return defer.succeed(None)
|
||||
|
||||
if 'error' in result:
|
||||
|
@ -377,7 +377,7 @@ class LBRYWallet(object):
|
|||
result = {}
|
||||
try:
|
||||
metadata = Metadata(json.loads(claim['value']))
|
||||
meta_ver = metadata.meta_version
|
||||
meta_ver = metadata.version
|
||||
sd_hash = metadata['sources']['lbry_sd_hash']
|
||||
d = self._save_name_metadata(name, txid, sd_hash)
|
||||
except AssertionError:
|
||||
|
@ -1256,7 +1256,7 @@ class LBRYumWallet(LBRYWallet):
|
|||
def _send_name_claim_update(self, name, claim_id, txid, value, amount):
|
||||
def send_claim_update(address):
|
||||
decoded_claim_id = claim_id.decode('hex')[::-1]
|
||||
metadata = Metadata(value).as_json()
|
||||
metadata = json.dumps(Metadata(value))
|
||||
log.info("updateclaim %s %s %f %s %s '%s'", txid, address, amount, name, decoded_claim_id.encode('hex'), json.dumps(metadata))
|
||||
cmd = known_commands['updateclaim']
|
||||
func = getattr(self.cmd_runner, cmd.name)
|
||||
|
|
|
@ -40,10 +40,9 @@ from lbrynet.lbrynet_daemon.LBRYDownloader import GetStream
|
|||
from lbrynet.lbrynet_daemon.LBRYPublisher import Publisher
|
||||
from lbrynet.lbrynet_daemon.LBRYExchangeRateManager import ExchangeRateManager
|
||||
from lbrynet.lbrynet_daemon.Lighthouse import LighthouseClient
|
||||
from lbrynet.core.LBRYMetadata import Metadata
|
||||
from lbrynet.metadata.LBRYMetadata import Metadata, verify_name_characters
|
||||
from lbrynet.core import log_support
|
||||
from lbrynet.core import utils
|
||||
from lbrynet.core.LBRYMetadata import verify_name_characters
|
||||
from lbrynet.core.utils import generate_id
|
||||
from lbrynet.lbrynet_console.LBRYSettings import LBRYSettings
|
||||
from lbrynet.conf import MIN_BLOB_DATA_PAYMENT_RATE, DEFAULT_MAX_SEARCH_RESULTS, \
|
||||
|
@ -1369,6 +1368,24 @@ class LBRYDaemon(jsonrpc.JSONRPC):
|
|||
d.addCallback(lambda _: factory.finished_deferred)
|
||||
return d
|
||||
|
||||
def _reflect_blobs(self, blob_hashes):
|
||||
if not blob_hashes:
|
||||
return defer.fail(Exception("no lbry file given to reflect"))
|
||||
|
||||
log.info("Reflecting %i blobs" % len(blob_hashes))
|
||||
|
||||
reflector_server = random.choice(REFLECTOR_SERVERS)
|
||||
reflector_address, reflector_port = reflector_server[0], reflector_server[1]
|
||||
log.info("Start reflector client")
|
||||
factory = reflector.BlobClientFactory(
|
||||
self.session.blob_manager,
|
||||
blob_hashes
|
||||
)
|
||||
d = reactor.resolve(reflector_address)
|
||||
d.addCallback(lambda ip: reactor.connectTCP(ip, reflector_port, factory))
|
||||
d.addCallback(lambda _: factory.finished_deferred)
|
||||
return d
|
||||
|
||||
def _log_to_slack(self, msg):
|
||||
URL = "https://hooks.slack.com/services/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"
|
||||
msg = platform.platform() + ": " + base58.b58encode(self.lbryid)[:20] + ", " + msg
|
||||
|
@ -1785,6 +1802,7 @@ class LBRYDaemon(jsonrpc.JSONRPC):
|
|||
stream_info=params.stream_info,
|
||||
file_name=params.file_name,
|
||||
wait_for_write=params.wait_for_write)
|
||||
# TODO: downloading can timeout. Not sure what to do when that happens
|
||||
d.addCallback(get_output_callback(params))
|
||||
d.addCallback(lambda message: self._render_response(message, OK_CODE))
|
||||
return d
|
||||
|
@ -2443,7 +2461,7 @@ class LBRYDaemon(jsonrpc.JSONRPC):
|
|||
Reflect a stream
|
||||
|
||||
Args:
|
||||
sd_hash
|
||||
sd_hash: sd_hash of lbry file
|
||||
Returns:
|
||||
True or traceback
|
||||
"""
|
||||
|
@ -2454,12 +2472,32 @@ class LBRYDaemon(jsonrpc.JSONRPC):
|
|||
d.addCallbacks(lambda _: self._render_response(True, OK_CODE), lambda err: self._render_response(err.getTraceback(), OK_CODE))
|
||||
return d
|
||||
|
||||
def jsonrpc_get_blobs(self):
|
||||
def jsonrpc_get_blob_hashes(self):
|
||||
"""
|
||||
return all blobs
|
||||
Returns all blob hashes
|
||||
|
||||
Args:
|
||||
None
|
||||
Returns:
|
||||
list of blob hashes
|
||||
"""
|
||||
|
||||
d = defer.succeed(self.session.blob_manager.blobs)
|
||||
d = self.session.blob_manager.get_all_verified_blobs()
|
||||
d.addCallback(lambda r: self._render_response(r, OK_CODE))
|
||||
return d
|
||||
|
||||
def jsonrpc_reflect_all_blobs(self):
|
||||
"""
|
||||
Reflects all saved blobs
|
||||
|
||||
Args:
|
||||
None
|
||||
Returns:
|
||||
True
|
||||
"""
|
||||
|
||||
d = self.session.blob_manager.get_all_verified_blobs()
|
||||
d.addCallback(self._reflect_blobs)
|
||||
d.addCallback(lambda r: self._render_response(r, OK_CODE))
|
||||
return d
|
||||
|
||||
|
@ -2558,6 +2596,13 @@ class _DownloadNameHelper(object):
|
|||
def _get_stream(self, stream_info):
|
||||
d = self.daemon.add_stream(
|
||||
self.name, self.timeout, self.download_directory, self.file_name, stream_info)
|
||||
|
||||
def _raiseErrorOnTimeout(args):
|
||||
was_successful, _, _ = args
|
||||
if not was_successful:
|
||||
raise Exception('What am I supposed to do with a timed-out downloader?')
|
||||
d.addCallback(_raiseErrorOnTimeout)
|
||||
|
||||
if self.wait_for_write:
|
||||
d.addCallback(lambda _: self._wait_for_write())
|
||||
d.addCallback(lambda _: self.daemon.streams[self.name].downloader)
|
||||
|
|
|
@ -255,6 +255,7 @@ class LBRYFileStreamer(object):
|
|||
self._request.setResponseCode(206)
|
||||
self._request.setHeader('accept-ranges', 'bytes')
|
||||
self._request.setHeader('content-type', self._content_type)
|
||||
self._request.setHeader("Content-Security-Policy", "sandbox")
|
||||
|
||||
self.resumeProducing()
|
||||
|
||||
|
@ -339,6 +340,7 @@ class HostedLBRYFile(resource.Resource):
|
|||
# return d
|
||||
|
||||
def render_GET(self, request):
|
||||
request.setHeader("Content-Security-Policy", "sandbox")
|
||||
if 'name' in request.args.keys():
|
||||
if request.args['name'][0] != 'lbry' and request.args['name'][0] not in self._api.waiting_on.keys():
|
||||
d = self._api._download_name(request.args['name'][0])
|
||||
|
|
|
@ -11,7 +11,7 @@ from twisted.internet.task import LoopingCall
|
|||
from lbrynet.core.Error import InsufficientFundsError, KeyFeeAboveMaxAllowed
|
||||
from lbrynet.core.PaymentRateManager import PaymentRateManager
|
||||
from lbrynet.core.StreamDescriptor import download_sd_blob
|
||||
from lbrynet.core.LBRYMetadata import LBRYFeeValidator
|
||||
from lbrynet.metadata.LBRYFee import LBRYFeeValidator
|
||||
from lbrynet.lbryfilemanager.LBRYFileDownloader import ManagedLBRYFileDownloaderFactory
|
||||
from lbrynet.conf import DEFAULT_TIMEOUT, LOG_FILE_NAME
|
||||
|
||||
|
@ -75,14 +75,14 @@ class GetStream(object):
|
|||
# TODO: Why is this the stopping condition for the finished callback?
|
||||
if self.download_path:
|
||||
self.checker.stop()
|
||||
self.finished.callback((self.stream_hash, self.download_path))
|
||||
self.finished.callback((True, self.stream_hash, self.download_path))
|
||||
|
||||
elif self.timeout_counter >= self.timeout:
|
||||
log.info("Timeout downloading lbry://%s" % self.resolved_name)
|
||||
self.checker.stop()
|
||||
self.d.cancel()
|
||||
self.code = STREAM_STAGES[4]
|
||||
self.finished.callback(False)
|
||||
self.finished.callback((False, None, None))
|
||||
|
||||
def _convert_max_fee(self):
|
||||
if isinstance(self.max_key_fee, dict):
|
||||
|
|
|
@ -6,7 +6,7 @@ import googlefinance
|
|||
from twisted.internet import defer, reactor
|
||||
from twisted.internet.task import LoopingCall
|
||||
|
||||
from lbrynet.core.LBRYMetadata import LBRYFeeValidator
|
||||
from lbrynet.metadata.LBRYFee import LBRYFeeValidator
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
|
|
|
@ -10,7 +10,7 @@ from lbrynet.core.Error import InsufficientFundsError
|
|||
from lbrynet.lbryfilemanager.LBRYFileCreator import create_lbry_file
|
||||
from lbrynet.lbryfile.StreamDescriptor import publish_sd_blob
|
||||
from lbrynet.core.PaymentRateManager import PaymentRateManager
|
||||
from lbrynet.core.LBRYMetadata import Metadata, CURRENT_METADATA_VERSION
|
||||
from lbrynet.metadata.LBRYMetadata import Metadata
|
||||
from lbrynet.lbryfilemanager.LBRYFileDownloader import ManagedLBRYFileDownloader
|
||||
from lbrynet import reflector
|
||||
from lbrynet.conf import LOG_FILE_NAME, REFLECTOR_SERVERS
|
||||
|
@ -143,8 +143,8 @@ class Publisher(object):
|
|||
|
||||
def _update_metadata(self):
|
||||
filename = os.path.join(self.lbry_file.download_directory, self.lbry_file.file_name)
|
||||
self.metadata['content-type'] = get_content_type(filename)
|
||||
self.metadata['ver'] = CURRENT_METADATA_VERSION
|
||||
self.metadata['content_type'] = get_content_type(filename)
|
||||
self.metadata['ver'] = Metadata.current_version
|
||||
|
||||
def _show_publish_error(self, err):
|
||||
log.info(err.getTraceback())
|
||||
|
|
116
lbrynet/metadata/LBRYFee.py
Normal file
116
lbrynet/metadata/LBRYFee.py
Normal file
|
@ -0,0 +1,116 @@
|
|||
import logging
|
||||
|
||||
from lbrynet.metadata.Validator import Validator, skip_validate
|
||||
from lbrynet.conf import CURRENCIES
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def verify_supported_currency(fee):
|
||||
assert len(fee) == 1
|
||||
for c in fee:
|
||||
assert c in CURRENCIES
|
||||
return True
|
||||
|
||||
|
||||
def verify_amount(x):
|
||||
return isinstance(x, float) or isinstance(x, int) and x > 0
|
||||
|
||||
|
||||
class LBCFeeValidator(Validator):
|
||||
FV001 = "0.0.1"
|
||||
CURRENT_FEE_VERSION = FV001
|
||||
|
||||
FEE_REVISIONS = {}
|
||||
|
||||
FEE_REVISIONS[FV001] = [
|
||||
(Validator.REQUIRE, 'amount', verify_amount),
|
||||
(Validator.REQUIRE, 'address', skip_validate),
|
||||
]
|
||||
|
||||
FEE_MIGRATIONS = []
|
||||
|
||||
current_version = CURRENT_FEE_VERSION
|
||||
versions = FEE_REVISIONS
|
||||
migrations = FEE_MIGRATIONS
|
||||
|
||||
def __init__(self, fee):
|
||||
Validator.__init__(self, fee)
|
||||
|
||||
|
||||
class BTCFeeValidator(Validator):
|
||||
FV001 = "0.0.1"
|
||||
CURRENT_FEE_VERSION = FV001
|
||||
|
||||
FEE_REVISIONS = {}
|
||||
|
||||
FEE_REVISIONS[FV001] = [
|
||||
(Validator.REQUIRE, 'amount',verify_amount),
|
||||
(Validator.REQUIRE, 'address', skip_validate),
|
||||
]
|
||||
|
||||
FEE_MIGRATIONS = []
|
||||
|
||||
current_version = CURRENT_FEE_VERSION
|
||||
versions = FEE_REVISIONS
|
||||
migrations = FEE_MIGRATIONS
|
||||
|
||||
def __init__(self, fee):
|
||||
Validator.__init__(self, fee)
|
||||
|
||||
|
||||
class USDFeeValidator(Validator):
|
||||
FV001 = "0.0.1"
|
||||
CURRENT_FEE_VERSION = FV001
|
||||
|
||||
FEE_REVISIONS = {}
|
||||
|
||||
FEE_REVISIONS[FV001] = [
|
||||
(Validator.REQUIRE, 'amount',verify_amount),
|
||||
(Validator.REQUIRE, 'address', skip_validate),
|
||||
]
|
||||
|
||||
FEE_MIGRATIONS = []
|
||||
|
||||
current_version = CURRENT_FEE_VERSION
|
||||
versions = FEE_REVISIONS
|
||||
migrations = FEE_MIGRATIONS
|
||||
|
||||
def __init__(self, fee):
|
||||
Validator.__init__(self, fee)
|
||||
|
||||
|
||||
class LBRYFeeValidator(Validator):
|
||||
CV001 = "0.0.1"
|
||||
CURRENT_CURRENCY_VERSION = CV001
|
||||
|
||||
CURRENCY_REVISIONS = {}
|
||||
|
||||
CURRENCY_REVISIONS[CV001] = [
|
||||
(Validator.OPTIONAL, 'BTC', BTCFeeValidator.validate),
|
||||
(Validator.OPTIONAL, 'USD', USDFeeValidator.validate),
|
||||
(Validator.OPTIONAL, 'LBC', LBCFeeValidator.validate),
|
||||
]
|
||||
|
||||
CURRENCY_MIGRATIONS = []
|
||||
|
||||
current_version = CURRENT_CURRENCY_VERSION
|
||||
versions = CURRENCY_REVISIONS
|
||||
migrations = CURRENCY_MIGRATIONS
|
||||
|
||||
def __init__(self, fee_dict):
|
||||
Validator.__init__(self, fee_dict)
|
||||
self.currency_symbol = self.keys()[0]
|
||||
self.amount = self._get_amount()
|
||||
self.address = self[self.currency_symbol]['address']
|
||||
|
||||
def _get_amount(self):
|
||||
amt = self[self.currency_symbol]['amount']
|
||||
if isinstance(amt, float):
|
||||
return amt
|
||||
else:
|
||||
try:
|
||||
return float(amt)
|
||||
except TypeError:
|
||||
log.error('Failed to convert %s to float', amt)
|
||||
raise
|
86
lbrynet/metadata/LBRYMetadata.py
Normal file
86
lbrynet/metadata/LBRYMetadata.py
Normal file
|
@ -0,0 +1,86 @@
|
|||
import logging
|
||||
|
||||
from lbrynet.metadata.Validator import Validator, skip_validate
|
||||
from lbrynet.metadata.LBRYFee import LBRYFeeValidator, verify_supported_currency
|
||||
from lbrynet.conf import SOURCE_TYPES
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
NAME_ALLOWED_CHARSET = 'abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0987654321-'
|
||||
|
||||
|
||||
def verify_name_characters(name):
|
||||
for c in name:
|
||||
assert c in NAME_ALLOWED_CHARSET, "Invalid character"
|
||||
return True
|
||||
|
||||
|
||||
def validate_sources(sources):
|
||||
for source in sources:
|
||||
assert source in SOURCE_TYPES, "Unknown source type: %s" % str(source)
|
||||
return True
|
||||
|
||||
|
||||
class Metadata(Validator):
|
||||
MV001 = "0.0.1"
|
||||
MV002 = "0.0.2"
|
||||
MV003 = "0.0.3"
|
||||
CURRENT_METADATA_VERSION = MV003
|
||||
|
||||
METADATA_REVISIONS = {}
|
||||
|
||||
METADATA_REVISIONS[MV001] = [
|
||||
(Validator.REQUIRE, 'title', skip_validate),
|
||||
(Validator.REQUIRE, 'description', skip_validate),
|
||||
(Validator.REQUIRE, 'author', skip_validate),
|
||||
(Validator.REQUIRE, 'language', skip_validate),
|
||||
(Validator.REQUIRE, 'license', skip_validate),
|
||||
(Validator.REQUIRE, 'content-type', skip_validate),
|
||||
(Validator.REQUIRE, 'sources', validate_sources),
|
||||
(Validator.OPTIONAL, 'thumbnail', skip_validate),
|
||||
(Validator.OPTIONAL, 'preview', skip_validate),
|
||||
(Validator.OPTIONAL, 'fee', verify_supported_currency),
|
||||
(Validator.OPTIONAL, 'contact', skip_validate),
|
||||
(Validator.OPTIONAL, 'pubkey', skip_validate),
|
||||
]
|
||||
|
||||
METADATA_REVISIONS[MV002] = [
|
||||
(Validator.REQUIRE, 'nsfw', skip_validate),
|
||||
(Validator.REQUIRE, 'ver', skip_validate),
|
||||
(Validator.OPTIONAL, 'license_url', skip_validate),
|
||||
]
|
||||
|
||||
METADATA_REVISIONS[MV003] = [
|
||||
(Validator.REQUIRE, 'content_type', skip_validate),
|
||||
(Validator.SKIP, 'content-type'),
|
||||
(Validator.OPTIONAL, 'sig', skip_validate),
|
||||
(Validator.IF_KEY, 'sig', (Validator.REQUIRE, 'pubkey', skip_validate), Validator.DO_NOTHING),
|
||||
(Validator.IF_KEY, 'pubkey', (Validator.REQUIRE, 'sig', skip_validate), Validator.DO_NOTHING),
|
||||
]
|
||||
|
||||
MIGRATE_MV001_TO_MV002 = [
|
||||
(Validator.IF_KEY, 'nsfw', Validator.DO_NOTHING, (Validator.LOAD, 'nsfw', False)),
|
||||
(Validator.IF_KEY, 'ver', Validator.DO_NOTHING, (Validator.LOAD, 'ver', MV002)),
|
||||
]
|
||||
|
||||
MIGRATE_MV002_TO_MV003 = [
|
||||
(Validator.IF_KEY, 'content-type', (Validator.UPDATE, 'content-type', 'content_type'), Validator.DO_NOTHING),
|
||||
(Validator.IF_VAL, 'ver', MV002, (Validator.LOAD, 'ver', MV003), Validator.DO_NOTHING),
|
||||
]
|
||||
|
||||
METADATA_MIGRATIONS = [
|
||||
MIGRATE_MV001_TO_MV002,
|
||||
MIGRATE_MV002_TO_MV003,
|
||||
]
|
||||
|
||||
current_version = CURRENT_METADATA_VERSION
|
||||
versions = METADATA_REVISIONS
|
||||
migrations = METADATA_MIGRATIONS
|
||||
|
||||
def __init__(self, metadata, process_now=True):
|
||||
Validator.__init__(self, metadata, process_now)
|
||||
self.meta_version = self.get('ver', Metadata.MV001)
|
||||
self._load_fee()
|
||||
|
||||
def _load_fee(self):
|
||||
if 'fee' in self:
|
||||
self.update({'fee': LBRYFeeValidator(self['fee'])})
|
155
lbrynet/metadata/Validator.py
Normal file
155
lbrynet/metadata/Validator.py
Normal file
|
@ -0,0 +1,155 @@
|
|||
import json
|
||||
import logging
|
||||
from copy import deepcopy
|
||||
from distutils.version import StrictVersion
|
||||
from lbrynet.core.utils import version_is_greater_than
|
||||
|
||||
log = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def skip_validate(value):
|
||||
return True
|
||||
|
||||
|
||||
def processor(cls):
|
||||
for methodname in dir(cls):
|
||||
method = getattr(cls, methodname)
|
||||
if hasattr(method, 'cmd_name'):
|
||||
cls.commands.update({method.cmd_name: methodname})
|
||||
return cls
|
||||
|
||||
|
||||
def cmd(cmd_name):
|
||||
def wrapper(func):
|
||||
func.cmd_name = cmd_name
|
||||
return func
|
||||
return wrapper
|
||||
|
||||
|
||||
@processor
|
||||
class Validator(dict):
|
||||
"""
|
||||
Base class for validated dictionaries
|
||||
"""
|
||||
|
||||
# override these
|
||||
current_version = None
|
||||
versions = {}
|
||||
migrations = []
|
||||
|
||||
# built in commands
|
||||
DO_NOTHING = "do_nothing"
|
||||
UPDATE = "update_key"
|
||||
IF_KEY = "if_key"
|
||||
REQUIRE = "require"
|
||||
SKIP = "skip"
|
||||
OPTIONAL = "optional"
|
||||
LOAD = "load"
|
||||
IF_VAL = "if_val"
|
||||
|
||||
commands = {}
|
||||
|
||||
@classmethod
|
||||
def load_from_hex(cls, hex_val):
|
||||
return cls(json.loads(hex_val.decode('hex')))
|
||||
|
||||
@classmethod
|
||||
def validate(cls, value):
|
||||
if cls(value):
|
||||
return True
|
||||
else:
|
||||
return False
|
||||
|
||||
def __init__(self, value, process_now=False):
|
||||
dict.__init__(self)
|
||||
self._skip = []
|
||||
value_to_load = deepcopy(value)
|
||||
if process_now:
|
||||
self.process(value_to_load)
|
||||
self._verify_value(value_to_load)
|
||||
self.version = self.get('ver', "0.0.1")
|
||||
|
||||
def process(self, value):
|
||||
if self.migrations is not None:
|
||||
self._migrate_value(value)
|
||||
|
||||
@cmd(DO_NOTHING)
|
||||
def _do_nothing(self):
|
||||
pass
|
||||
|
||||
@cmd(SKIP)
|
||||
def _add_to_skipped(self, rx_value, key):
|
||||
if key not in self._skip:
|
||||
self._skip.append(key)
|
||||
|
||||
@cmd(UPDATE)
|
||||
def _update(self, rx_value, old_key, new_key):
|
||||
rx_value.update({new_key: rx_value.pop(old_key)})
|
||||
|
||||
@cmd(IF_KEY)
|
||||
def _if_key(self, rx_value, key, if_true, if_else):
|
||||
if key in rx_value:
|
||||
return self._handle(if_true, rx_value)
|
||||
return self._handle(if_else, rx_value)
|
||||
|
||||
@cmd(IF_VAL)
|
||||
def _if_val(self, rx_value, key, val, if_true, if_else):
|
||||
if key in rx_value:
|
||||
if rx_value[key] == val:
|
||||
return self._handle(if_true, rx_value)
|
||||
return self._handle(if_else, rx_value)
|
||||
|
||||
@cmd(LOAD)
|
||||
def _load(self, rx_value, key, value):
|
||||
rx_value.update({key: value})
|
||||
|
||||
@cmd(REQUIRE)
|
||||
def _require(self, rx_value, key, validator=None):
|
||||
if key not in self._skip:
|
||||
assert key in rx_value, "Key is missing: %s" % key
|
||||
if isinstance(validator, type):
|
||||
assert isinstance(rx_value[key], validator), "%s: %s isn't required %s" % (key, type(rx_value[key]), validator)
|
||||
elif callable(validator):
|
||||
assert validator(rx_value[key]), "Failed to validate %s" % key
|
||||
self.update({key: rx_value.pop(key)})
|
||||
|
||||
@cmd(OPTIONAL)
|
||||
def _optional(self, rx_value, key, validator=None):
|
||||
if key in rx_value and key not in self._skip:
|
||||
if isinstance(validator, type):
|
||||
assert isinstance(rx_value[key], validator), "%s type %s isn't required %s" % (key, type(rx_value[key]), validator)
|
||||
elif callable(validator):
|
||||
assert validator(rx_value[key]), "Failed to validate %s" % key
|
||||
self.update({key: rx_value.pop(key)})
|
||||
|
||||
def _handle(self, cmd_tpl, value):
|
||||
if cmd_tpl == Validator.DO_NOTHING:
|
||||
return
|
||||
command = cmd_tpl[0]
|
||||
f = getattr(self, self.commands[command])
|
||||
if len(cmd_tpl) > 1:
|
||||
args = (value,) + cmd_tpl[1:]
|
||||
f(*args)
|
||||
else:
|
||||
f()
|
||||
|
||||
def _load_revision(self, version, value):
|
||||
for k in self.versions[version]:
|
||||
self._handle(k, value)
|
||||
|
||||
def _verify_value(self, value):
|
||||
val_ver = value.get('ver', "0.0.1")
|
||||
# verify version requirements in reverse order starting from the version asserted in the value
|
||||
versions = sorted([v for v in self.versions if not version_is_greater_than(v, val_ver)], key=StrictVersion, reverse=True)
|
||||
for version in versions:
|
||||
self._load_revision(version, value)
|
||||
assert value == {} or value.keys() == self._skip, "Unknown keys: %s" % json.dumps(value)
|
||||
|
||||
def _migrate_value(self, value):
|
||||
for migration in self.migrations:
|
||||
self._run_migration(migration, value)
|
||||
|
||||
def _run_migration(self, commands, value):
|
||||
for cmd in commands:
|
||||
self._handle(cmd, value)
|
||||
|
|
@ -1,2 +1,3 @@
|
|||
from lbrynet.reflector.server.server import ReflectorServerFactory as ServerFactory
|
||||
from lbrynet.reflector.client.client import LBRYFileReflectorClientFactory as ClientFactory
|
||||
from lbrynet.reflector.client.client import LBRYBlobReflectorClientFactory as BlobClientFactory
|
||||
|
|
|
@ -267,3 +267,188 @@ class LBRYFileReflectorClientFactory(ClientFactory):
|
|||
|
||||
def clientConnectionFailed(self, connector, reason):
|
||||
log.debug("connection failed: %s", reason)
|
||||
|
||||
|
||||
class LBRYBlobReflectorClient(Protocol):
|
||||
# Protocol stuff
|
||||
|
||||
def connectionMade(self):
|
||||
self.blob_manager = self.factory.blob_manager
|
||||
self.response_buff = ''
|
||||
self.outgoing_buff = ''
|
||||
self.blob_hashes_to_send = self.factory.blobs
|
||||
self.next_blob_to_send = None
|
||||
self.blob_read_handle = None
|
||||
self.received_handshake_response = False
|
||||
self.protocol_version = None
|
||||
self.file_sender = None
|
||||
self.producer = None
|
||||
self.streaming = False
|
||||
d = self.send_handshake()
|
||||
d.addErrback(lambda err: log.warning("An error occurred immediately: %s", err.getTraceback()))
|
||||
|
||||
def dataReceived(self, data):
|
||||
log.debug('Recieved %s', data)
|
||||
self.response_buff += data
|
||||
try:
|
||||
msg = self.parse_response(self.response_buff)
|
||||
except IncompleteResponseError:
|
||||
pass
|
||||
else:
|
||||
self.response_buff = ''
|
||||
d = self.handle_response(msg)
|
||||
d.addCallback(lambda _: self.send_next_request())
|
||||
d.addErrback(self.response_failure_handler)
|
||||
|
||||
def connectionLost(self, reason):
|
||||
if reason.check(error.ConnectionDone):
|
||||
log.debug('Finished sending data via reflector')
|
||||
self.factory.finished_deferred.callback(True)
|
||||
else:
|
||||
log.debug('reflector finished: %s', reason)
|
||||
self.factory.finished_deferred.callback(reason)
|
||||
|
||||
# IConsumer stuff
|
||||
|
||||
def registerProducer(self, producer, streaming):
|
||||
self.producer = producer
|
||||
self.streaming = streaming
|
||||
if self.streaming is False:
|
||||
from twisted.internet import reactor
|
||||
reactor.callLater(0, self.producer.resumeProducing)
|
||||
|
||||
def unregisterProducer(self):
|
||||
self.producer = None
|
||||
|
||||
def write(self, data):
|
||||
self.transport.write(data)
|
||||
if self.producer is not None and self.streaming is False:
|
||||
from twisted.internet import reactor
|
||||
reactor.callLater(0, self.producer.resumeProducing)
|
||||
|
||||
def send_handshake(self):
|
||||
log.debug('Sending handshake')
|
||||
self.write(json.dumps({'version': 0}))
|
||||
return defer.succeed(None)
|
||||
|
||||
def parse_response(self, buff):
|
||||
try:
|
||||
return json.loads(buff)
|
||||
except ValueError:
|
||||
raise IncompleteResponseError()
|
||||
|
||||
def response_failure_handler(self, err):
|
||||
log.warning("An error occurred handling the response: %s", err.getTraceback())
|
||||
|
||||
def handle_response(self, response_dict):
|
||||
if self.received_handshake_response is False:
|
||||
return self.handle_handshake_response(response_dict)
|
||||
else:
|
||||
return self.handle_normal_response(response_dict)
|
||||
|
||||
def set_not_uploading(self):
|
||||
if self.next_blob_to_send is not None:
|
||||
self.next_blob_to_send.close_read_handle(self.read_handle)
|
||||
self.read_handle = None
|
||||
self.next_blob_to_send = None
|
||||
self.file_sender = None
|
||||
return defer.succeed(None)
|
||||
|
||||
def start_transfer(self):
|
||||
self.write(json.dumps({}))
|
||||
assert self.read_handle is not None, "self.read_handle was None when trying to start the transfer"
|
||||
d = self.file_sender.beginFileTransfer(self.read_handle, self)
|
||||
return d
|
||||
|
||||
def handle_handshake_response(self, response_dict):
|
||||
if 'version' not in response_dict:
|
||||
raise ValueError("Need protocol version number!")
|
||||
self.protocol_version = int(response_dict['version'])
|
||||
if self.protocol_version != 0:
|
||||
raise ValueError("I can't handle protocol version {}!".format(self.protocol_version))
|
||||
self.received_handshake_response = True
|
||||
return defer.succeed(True)
|
||||
|
||||
def handle_normal_response(self, response_dict):
|
||||
if self.file_sender is None: # Expecting Server Info Response
|
||||
if 'send_blob' not in response_dict:
|
||||
raise ValueError("I don't know whether to send the blob or not!")
|
||||
if response_dict['send_blob'] is True:
|
||||
self.file_sender = FileSender()
|
||||
return defer.succeed(True)
|
||||
else:
|
||||
return self.set_not_uploading()
|
||||
else: # Expecting Server Blob Response
|
||||
if 'received_blob' not in response_dict:
|
||||
raise ValueError("I don't know if the blob made it to the intended destination!")
|
||||
else:
|
||||
return self.set_not_uploading()
|
||||
|
||||
def open_blob_for_reading(self, blob):
|
||||
if blob.is_validated():
|
||||
read_handle = blob.open_for_reading()
|
||||
if read_handle is not None:
|
||||
log.debug('Getting ready to send %s', blob.blob_hash)
|
||||
self.next_blob_to_send = blob
|
||||
self.read_handle = read_handle
|
||||
return None
|
||||
raise ValueError("Couldn't open that blob for some reason. blob_hash: {}".format(blob.blob_hash))
|
||||
|
||||
def send_blob_info(self):
|
||||
log.info("Send blob info for %s", self.next_blob_to_send.blob_hash)
|
||||
assert self.next_blob_to_send is not None, "need to have a next blob to send at this point"
|
||||
log.debug('sending blob info')
|
||||
self.write(json.dumps({
|
||||
'blob_hash': self.next_blob_to_send.blob_hash,
|
||||
'blob_size': self.next_blob_to_send.length
|
||||
}))
|
||||
|
||||
def send_next_request(self):
|
||||
if self.file_sender is not None:
|
||||
# send the blob
|
||||
log.debug('Sending the blob')
|
||||
return self.start_transfer()
|
||||
elif self.blob_hashes_to_send:
|
||||
# open the next blob to send
|
||||
blob_hash = self.blob_hashes_to_send[0]
|
||||
log.debug('No current blob, sending the next one: %s', blob_hash)
|
||||
self.blob_hashes_to_send = self.blob_hashes_to_send[1:]
|
||||
d = self.blob_manager.get_blob(blob_hash, True)
|
||||
d.addCallback(self.open_blob_for_reading)
|
||||
# send the server the next blob hash + length
|
||||
d.addCallback(lambda _: self.send_blob_info())
|
||||
return d
|
||||
else:
|
||||
# close connection
|
||||
log.debug('No more blob hashes, closing connection')
|
||||
self.transport.loseConnection()
|
||||
|
||||
|
||||
class LBRYBlobReflectorClientFactory(ClientFactory):
|
||||
protocol = LBRYBlobReflectorClient
|
||||
|
||||
def __init__(self, blob_manager, blobs):
|
||||
self.blob_manager = blob_manager
|
||||
self.blobs = blobs
|
||||
self.p = None
|
||||
self.finished_deferred = defer.Deferred()
|
||||
|
||||
def buildProtocol(self, addr):
|
||||
p = self.protocol()
|
||||
p.factory = self
|
||||
self.p = p
|
||||
return p
|
||||
|
||||
def startFactory(self):
|
||||
log.debug('Starting reflector factory')
|
||||
ClientFactory.startFactory(self)
|
||||
|
||||
def startedConnecting(self, connector):
|
||||
log.debug('Started connecting')
|
||||
|
||||
def clientConnectionLost(self, connector, reason):
|
||||
"""If we get disconnected, reconnect to server."""
|
||||
log.debug("connection lost: %s", reason)
|
||||
|
||||
def clientConnectionFailed(self, connector, reason):
|
||||
log.debug("connection failed: %s", reason)
|
||||
|
|
|
@ -3,7 +3,6 @@ from twisted.python import failure
|
|||
from twisted.internet import error, defer
|
||||
from twisted.internet.protocol import Protocol, ServerFactory
|
||||
import json
|
||||
|
||||
from lbrynet.core.utils import is_valid_blobhash
|
||||
|
||||
|
||||
|
|
0
packaging/__init__.py
Normal file
0
packaging/__init__.py
Normal file
75
packaging/osx/install_lbry_source.sh
Normal file
75
packaging/osx/install_lbry_source.sh
Normal file
|
@ -0,0 +1,75 @@
|
|||
#!/bin/sh
|
||||
|
||||
if [ "$EUID" -ne 0 ]
|
||||
then echo "Please run as sudo"
|
||||
exit
|
||||
fi
|
||||
|
||||
echo "**********************************"
|
||||
echo "Installing LBRY and dependencies"
|
||||
echo "**********************************"
|
||||
|
||||
if ! xcode-select -p &>/dev/null; then
|
||||
echo
|
||||
echo "You need to install xcode command line tools to install lbry."
|
||||
echo "A popup to do so should appear, once you're done the installer will resume"
|
||||
echo
|
||||
xcode-select --install &>/dev/null
|
||||
while ! xcode-select -p &>/dev/null; do
|
||||
sleep 1
|
||||
done
|
||||
echo "Installed xcode command line tools"
|
||||
else
|
||||
echo "Xcode command line tools already installed..."
|
||||
fi
|
||||
|
||||
if ! which brew &>/dev/null; then
|
||||
echo "Installing brew..."
|
||||
sudo -u ${SUDO_USER} ruby -e "$(curl -fsSL https://raw.githubusercontent.com/Homebrew/install/master/install)" < /dev/null &>/dev/null
|
||||
else
|
||||
echo "Updating brew..."
|
||||
sudo -u ${SUDO_USER} brew update &>/dev/null
|
||||
fi
|
||||
|
||||
if ! brew list mpfr &>/dev/null; then
|
||||
echo "Installing mpfr..."
|
||||
sudo -u ${SUDO_USER} brew install mpfr &>/dev/null
|
||||
else
|
||||
echo "mpfr already installed..."
|
||||
fi
|
||||
|
||||
if ! brew list libmpc &>/dev/null; then
|
||||
echo "Installing libmpc..."
|
||||
sudo -u ${SUDO_USER} brew install libmpc &>/dev/null
|
||||
else
|
||||
echo "libmpc already installed..."
|
||||
fi
|
||||
|
||||
if ! which pip &>/dev/null; then
|
||||
echo "Installing pip..."
|
||||
sudo easy_install pip &>/dev/null
|
||||
else
|
||||
echo "pip already installed"
|
||||
fi
|
||||
|
||||
echo "Cloning and installing lbryum..."
|
||||
git clone --depth 1 https://github.com/lbryio/lbryum.git &>/dev/null
|
||||
cd lbryum
|
||||
sudo python setup.py install &>/dev/null
|
||||
cd ..
|
||||
rm -rf lbryum &>/dev/null
|
||||
|
||||
echo "Cloning and installing lbry..."
|
||||
git clone --depth 1 https://github.com/lbryio/lbry.git &>/dev/null
|
||||
cd lbry
|
||||
sudo python setup.py install &>/dev/null
|
||||
cd ..
|
||||
rm -rf lbry &>/dev/null
|
||||
|
||||
sudo chmod -R 755 /Library/Python/2.7/site-packages/
|
||||
|
||||
echo "**********************************"
|
||||
echo "All done!"
|
||||
echo "**********************************"
|
||||
echo " "
|
||||
echo "run 'lbrynet-daemon' to start lbry"
|
|
@ -1,5 +1,5 @@
|
|||
[Desktop Entry]
|
||||
Version=0.3.22
|
||||
Version=0.4.0
|
||||
Name=LBRY
|
||||
Comment=The world's first user-owned content marketplace
|
||||
Icon=lbry
|
||||
|
|
0
packaging/uri_handler/__init__.py
Normal file
0
packaging/uri_handler/__init__.py
Normal file
|
@ -4,8 +4,8 @@ import socket
|
|||
import sys
|
||||
import threading
|
||||
import webbrowser
|
||||
import win32api
|
||||
|
||||
import win32api
|
||||
import win32con
|
||||
import win32gui_struct
|
||||
from jsonrpc.proxy import JSONRPCProxy
|
||||
|
@ -20,7 +20,7 @@ except ImportError:
|
|||
from lbrynet.lbrynet_daemon.LBRYDaemonServer import LBRYDaemonServer, LBRYDaemonRequest
|
||||
from lbrynet.conf import API_PORT, API_INTERFACE, ICON_PATH, APP_NAME
|
||||
from lbrynet.conf import UI_ADDRESS, API_CONNECTION_STRING
|
||||
from lbrynet.packaging.LBRYURIHandler import LBRYURIHandler
|
||||
from packaging.uri_handler.LBRYURIHandler import LBRYURIHandler
|
||||
|
||||
|
||||
if getattr(sys, 'frozen', False) and os.name == "nt":
|
||||
|
|
6
setup.py
6
setup.py
|
@ -25,7 +25,11 @@ console_scripts = ['lbrynet-stdin-uploader = lbrynet.lbrynet_console.LBRYStdinUp
|
|||
|
||||
requires = ['pycrypto', 'twisted', 'miniupnpc', 'yapsy', 'seccure',
|
||||
'python-bitcoinrpc==0.1', 'txJSON-RPC', 'requests>=2.4.2', 'unqlite==0.2.0',
|
||||
'leveldb', 'lbryum', 'jsonrpc', 'simplejson', 'appdirs', 'six==1.9.0', 'base58', 'googlefinance', 'requests_futures']
|
||||
'leveldb', 'lbryum', 'jsonrpc', 'simplejson', 'appdirs', 'six==1.9.0', 'base58', 'googlefinance',
|
||||
'requests_futures']
|
||||
|
||||
if sys.platform.startswith("linux"):
|
||||
requires.append('service-identity')
|
||||
|
||||
setup(name='lbrynet',
|
||||
description='A decentralized media library and marketplace',
|
||||
|
|
|
@ -1,5 +1,5 @@
|
|||
import mock
|
||||
from lbrynet.core import LBRYMetadata
|
||||
from lbrynet.metadata import LBRYMetadata
|
||||
from lbrynet.lbrynet_daemon import LBRYExchangeRateManager
|
||||
|
||||
from twisted.trial import unittest
|
||||
|
|
|
@ -1,13 +1,26 @@
|
|||
from lbrynet.core import LBRYMetadata
|
||||
from lbrynet.metadata import LBRYMetadata
|
||||
from twisted.trial import unittest
|
||||
|
||||
|
||||
class MetadataTest(unittest.TestCase):
|
||||
def test_assertion_if_source_is_missing(self):
|
||||
def test_assertion_if_no_metadata(self):
|
||||
metadata = {}
|
||||
with self.assertRaises(AssertionError):
|
||||
LBRYMetadata.Metadata(metadata)
|
||||
|
||||
def test_assertion_if_source_is_missing(self):
|
||||
metadata = {
|
||||
'license': 'Oscilloscope Laboratories',
|
||||
'description': 'Four couples meet for Sunday brunch only to discover they are stuck in a house together as the world may be about to end.',
|
||||
'language': 'en',
|
||||
'title': "It's a Disaster",
|
||||
'author': 'Written and directed by Todd Berger',
|
||||
'content-type': 'audio/mpeg',
|
||||
'thumbnail': 'http://ia.media-imdb.com/images/M/MV5BMTQwNjYzMTQ0Ml5BMl5BanBnXkFtZTcwNDUzODM5Nw@@._V1_SY1000_CR0,0,673,1000_AL_.jpg',
|
||||
}
|
||||
with self.assertRaises(AssertionError):
|
||||
LBRYMetadata.Metadata(metadata)
|
||||
|
||||
def test_metadata_works_without_fee(self):
|
||||
metadata = {
|
||||
'license': 'Oscilloscope Laboratories',
|
||||
|
@ -18,15 +31,14 @@ class MetadataTest(unittest.TestCase):
|
|||
'sources': {
|
||||
'lbry_sd_hash': '8d0d6ea64d09f5aa90faf5807d8a761c32a27047861e06f81f41e35623a348a4b0104052161d5f89cf190f9672bc4ead'},
|
||||
'content-type': 'audio/mpeg',
|
||||
'thumbnail': 'http://ia.media-imdb.com/images/M/MV5BMTQwNjYzMTQ0Ml5BMl5BanBnXkFtZTcwNDUzODM5Nw@@._V1_SY1000_CR0,0,673,1000_AL_.jpg'
|
||||
'thumbnail': 'http://ia.media-imdb.com/images/M/MV5BMTQwNjYzMTQ0Ml5BMl5BanBnXkFtZTcwNDUzODM5Nw@@._V1_SY1000_CR0,0,673,1000_AL_.jpg',
|
||||
}
|
||||
m = LBRYMetadata.Metadata(metadata)
|
||||
self.assertFalse('key' in m)
|
||||
self.assertFalse('fee' in m)
|
||||
|
||||
def test_assertion_if_invalid_source(self):
|
||||
metadata = {
|
||||
'license': 'Oscilloscope Laboratories',
|
||||
'fee': {'LBC': {'amount': 50.0, 'address': 'bRQJASJrDbFZVAvcpv3NoNWoH74LQd5JNV'}},
|
||||
'description': 'Four couples meet for Sunday brunch only to discover they are stuck in a house together as the world may be about to end.',
|
||||
'language': 'en',
|
||||
'title': "It's a Disaster",
|
||||
|
@ -34,7 +46,7 @@ class MetadataTest(unittest.TestCase):
|
|||
'sources': {
|
||||
'fake': 'source'},
|
||||
'content-type': 'audio/mpeg',
|
||||
'thumbnail': 'http://ia.media-imdb.com/images/M/MV5BMTQwNjYzMTQ0Ml5BMl5BanBnXkFtZTcwNDUzODM5Nw@@._V1_SY1000_CR0,0,673,1000_AL_.jpg'
|
||||
'thumbnail': 'http://ia.media-imdb.com/images/M/MV5BMTQwNjYzMTQ0Ml5BMl5BanBnXkFtZTcwNDUzODM5Nw@@._V1_SY1000_CR0,0,673,1000_AL_.jpg',
|
||||
}
|
||||
with self.assertRaises(AssertionError):
|
||||
LBRYMetadata.Metadata(metadata)
|
||||
|
@ -57,7 +69,6 @@ class MetadataTest(unittest.TestCase):
|
|||
def test_version_is_001_if_all_fields_are_present(self):
|
||||
metadata = {
|
||||
'license': 'Oscilloscope Laboratories',
|
||||
'fee': {'LBC': {'amount': 50.0, 'address': 'bRQJASJrDbFZVAvcpv3NoNWoH74LQd5JNV'}},
|
||||
'description': 'Four couples meet for Sunday brunch only to discover they are stuck in a house together as the world may be about to end.',
|
||||
'language': 'en',
|
||||
'title': "It's a Disaster",
|
||||
|
@ -65,29 +76,26 @@ class MetadataTest(unittest.TestCase):
|
|||
'sources': {
|
||||
'lbry_sd_hash': '8d0d6ea64d09f5aa90faf5807d8a761c32a27047861e06f81f41e35623a348a4b0104052161d5f89cf190f9672bc4ead'},
|
||||
'content-type': 'audio/mpeg',
|
||||
'thumbnail': 'http://ia.media-imdb.com/images/M/MV5BMTQwNjYzMTQ0Ml5BMl5BanBnXkFtZTcwNDUzODM5Nw@@._V1_SY1000_CR0,0,673,1000_AL_.jpg'
|
||||
'thumbnail': 'http://ia.media-imdb.com/images/M/MV5BMTQwNjYzMTQ0Ml5BMl5BanBnXkFtZTcwNDUzODM5Nw@@._V1_SY1000_CR0,0,673,1000_AL_.jpg',
|
||||
}
|
||||
m = LBRYMetadata.Metadata(metadata)
|
||||
self.assertEquals('0.0.1', m.meta_version)
|
||||
m = LBRYMetadata.Metadata(metadata, process_now=False)
|
||||
self.assertEquals('0.0.1', m.version)
|
||||
|
||||
def test_assertion_if_there_is_an_extra_field(self):
|
||||
metadata = {
|
||||
'license': 'NASA',
|
||||
'fee': {'USD': {'amount': 0.01, 'address': 'baBYSK7CqGSn5KrEmNmmQwAhBSFgo6v47z'}},
|
||||
'ver': '0.0.2',
|
||||
'description': 'SDO captures images of the sun in 10 different wavelengths, each of which helps highlight a different temperature of solar material. Different temperatures can, in turn, show specific structures on the sun such as solar flares, which are gigantic explosions of light and x-rays, or coronal loops, which are stream of solar material travelling up and down looping magnetic field lines',
|
||||
'license': 'Oscilloscope Laboratories',
|
||||
'description': 'Four couples meet for Sunday brunch only to discover they are stuck in a house together as the world may be about to end.',
|
||||
'language': 'en',
|
||||
'author': 'The SDO Team, Genna Duberstein and Scott Wiessinger',
|
||||
'title': 'Thermonuclear Art',
|
||||
'title': "It's a Disaster",
|
||||
'author': 'Written and directed by Todd Berger',
|
||||
'sources': {
|
||||
'lbry_sd_hash': '8655f713819344980a9a0d67b198344e2c462c90f813e86f0c63789ab0868031f25c54d0bb31af6658e997e2041806eb'},
|
||||
'nsfw': False,
|
||||
'content-type': 'video/mp4',
|
||||
'thumbnail': 'https://svs.gsfc.nasa.gov/vis/a010000/a012000/a012034/Combined.00_08_16_17.Still004.jpg',
|
||||
'lbry_sd_hash': '8d0d6ea64d09f5aa90faf5807d8a761c32a27047861e06f81f41e35623a348a4b0104052161d5f89cf190f9672bc4ead'},
|
||||
'content-type': 'audio/mpeg',
|
||||
'thumbnail': 'http://ia.media-imdb.com/images/M/MV5BMTQwNjYzMTQ0Ml5BMl5BanBnXkFtZTcwNDUzODM5Nw@@._V1_SY1000_CR0,0,673,1000_AL_.jpg',
|
||||
'MYSTERYFIELD': '?'
|
||||
}
|
||||
with self.assertRaises(AssertionError):
|
||||
LBRYMetadata.Metadata(metadata)
|
||||
LBRYMetadata.Metadata(metadata, process_now=False)
|
||||
|
||||
def test_version_is_002_if_all_fields_are_present(self):
|
||||
metadata = {
|
||||
|
@ -104,8 +112,26 @@ class MetadataTest(unittest.TestCase):
|
|||
'content-type': 'video/mp4',
|
||||
'thumbnail': 'https://svs.gsfc.nasa.gov/vis/a010000/a012000/a012034/Combined.00_08_16_17.Still004.jpg'
|
||||
}
|
||||
m = LBRYMetadata.Metadata(metadata)
|
||||
self.assertEquals('0.0.2', m.meta_version)
|
||||
m = LBRYMetadata.Metadata(metadata, process_now=False)
|
||||
self.assertEquals('0.0.2', m.version)
|
||||
|
||||
def test_version_is_003_if_all_fields_are_present(self):
|
||||
metadata = {
|
||||
'license': 'NASA',
|
||||
'fee': {'USD': {'amount': 0.01, 'address': 'baBYSK7CqGSn5KrEmNmmQwAhBSFgo6v47z'}},
|
||||
'ver': '0.0.3',
|
||||
'description': 'SDO captures images of the sun in 10 different wavelengths, each of which helps highlight a different temperature of solar material. Different temperatures can, in turn, show specific structures on the sun such as solar flares, which are gigantic explosions of light and x-rays, or coronal loops, which are stream of solar material travelling up and down looping magnetic field lines',
|
||||
'language': 'en',
|
||||
'author': 'The SDO Team, Genna Duberstein and Scott Wiessinger',
|
||||
'title': 'Thermonuclear Art',
|
||||
'sources': {
|
||||
'lbry_sd_hash': '8655f713819344980a9a0d67b198344e2c462c90f813e86f0c63789ab0868031f25c54d0bb31af6658e997e2041806eb'},
|
||||
'nsfw': False,
|
||||
'content_type': 'video/mp4',
|
||||
'thumbnail': 'https://svs.gsfc.nasa.gov/vis/a010000/a012000/a012034/Combined.00_08_16_17.Still004.jpg'
|
||||
}
|
||||
m = LBRYMetadata.Metadata(metadata, process_now=False)
|
||||
self.assertEquals('0.0.3', m.version)
|
||||
|
||||
def test_version_claimed_is_001_but_version_is_002(self):
|
||||
metadata = {
|
||||
|
@ -123,7 +149,55 @@ class MetadataTest(unittest.TestCase):
|
|||
'thumbnail': 'https://svs.gsfc.nasa.gov/vis/a010000/a012000/a012034/Combined.00_08_16_17.Still004.jpg'
|
||||
}
|
||||
with self.assertRaises(AssertionError):
|
||||
LBRYMetadata.Metadata(metadata)
|
||||
LBRYMetadata.Metadata(metadata, process_now=False)
|
||||
|
||||
def test_version_claimed_is_002_but_version_is_003(self):
|
||||
metadata = {
|
||||
'license': 'NASA',
|
||||
'fee': {'USD': {'amount': 0.01, 'address': 'baBYSK7CqGSn5KrEmNmmQwAhBSFgo6v47z'}},
|
||||
'ver': '0.0.2',
|
||||
'description': 'SDO captures images of the sun in 10 different wavelengths, each of which helps highlight a different temperature of solar material. Different temperatures can, in turn, show specific structures on the sun such as solar flares, which are gigantic explosions of light and x-rays, or coronal loops, which are stream of solar material travelling up and down looping magnetic field lines',
|
||||
'language': 'en',
|
||||
'author': 'The SDO Team, Genna Duberstein and Scott Wiessinger',
|
||||
'title': 'Thermonuclear Art',
|
||||
'sources': {
|
||||
'lbry_sd_hash': '8655f713819344980a9a0d67b198344e2c462c90f813e86f0c63789ab0868031f25c54d0bb31af6658e997e2041806eb'},
|
||||
'nsfw': False,
|
||||
'content_type': 'video/mp4',
|
||||
'thumbnail': 'https://svs.gsfc.nasa.gov/vis/a010000/a012000/a012034/Combined.00_08_16_17.Still004.jpg'
|
||||
}
|
||||
with self.assertRaises(AssertionError):
|
||||
LBRYMetadata.Metadata(metadata, process_now=False)
|
||||
|
||||
def test_version_001_ports_to_003(self):
|
||||
metadata = {
|
||||
'license': 'Oscilloscope Laboratories',
|
||||
'description': 'Four couples meet for Sunday brunch only to discover they are stuck in a house together as the world may be about to end.',
|
||||
'language': 'en',
|
||||
'title': "It's a Disaster",
|
||||
'author': 'Written and directed by Todd Berger',
|
||||
'sources': {
|
||||
'lbry_sd_hash': '8d0d6ea64d09f5aa90faf5807d8a761c32a27047861e06f81f41e35623a348a4b0104052161d5f89cf190f9672bc4ead'},
|
||||
'content-type': 'audio/mpeg',
|
||||
'thumbnail': 'http://ia.media-imdb.com/images/M/MV5BMTQwNjYzMTQ0Ml5BMl5BanBnXkFtZTcwNDUzODM5Nw@@._V1_SY1000_CR0,0,673,1000_AL_.jpg',
|
||||
}
|
||||
m = LBRYMetadata.Metadata(metadata, process_now=True)
|
||||
self.assertEquals('0.0.3', m.version)
|
||||
|
||||
def test_version_002_ports_to_003(self):
|
||||
metadata = {
|
||||
'license': 'NASA',
|
||||
'fee': {'USD': {'amount': 0.01, 'address': 'baBYSK7CqGSn5KrEmNmmQwAhBSFgo6v47z'}},
|
||||
'ver': '0.0.2',
|
||||
'description': 'SDO captures images of the sun in 10 different wavelengths, each of which helps highlight a different temperature of solar material. Different temperatures can, in turn, show specific structures on the sun such as solar flares, which are gigantic explosions of light and x-rays, or coronal loops, which are stream of solar material travelling up and down looping magnetic field lines',
|
||||
'language': 'en',
|
||||
'author': 'The SDO Team, Genna Duberstein and Scott Wiessinger',
|
||||
'title': 'Thermonuclear Art',
|
||||
'sources': {
|
||||
'lbry_sd_hash': '8655f713819344980a9a0d67b198344e2c462c90f813e86f0c63789ab0868031f25c54d0bb31af6658e997e2041806eb'},
|
||||
'nsfw': False,
|
||||
'content-type': 'video/mp4',
|
||||
'thumbnail': 'https://svs.gsfc.nasa.gov/vis/a010000/a012000/a012034/Combined.00_08_16_17.Still004.jpg'
|
||||
}
|
||||
m = LBRYMetadata.Metadata(metadata, process_now=True)
|
||||
self.assertEquals('0.0.3', m.version)
|
Loading…
Reference in a new issue