Compare commits


3 commits

Author          SHA1        Message                         Date
Victor Shyba    a394713171  wip                             2019-09-08 23:23:14 -03:00
Victor Shyba    368c6ab4a0  move as_dict to json_encoder    2019-09-08 20:04:47 -03:00
Victor Shyba    907276045b  remove dead code                2019-09-06 03:02:16 -03:00
494 changed files with 45185 additions and 43104 deletions

@@ -1,206 +0,0 @@
name: ci
on: ["push", "pull_request", "workflow_dispatch"]
jobs:
lint:
name: lint
runs-on: ubuntu-20.04
steps:
- uses: actions/checkout@v3
- uses: actions/setup-python@v4
with:
python-version: '3.9'
- name: extract pip cache
uses: actions/cache@v3
with:
path: ~/.cache/pip
key: ${{ runner.os }}-pip-${{ hashFiles('setup.py') }}
restore-keys: ${{ runner.os }}-pip-
- run: pip install --user --upgrade pip wheel
- run: pip install -e .[lint]
- run: make lint
tests-unit:
name: "tests / unit"
strategy:
matrix:
os:
- ubuntu-20.04
- macos-latest
- windows-latest
runs-on: ${{ matrix.os }}
steps:
- uses: actions/checkout@v3
- uses: actions/setup-python@v4
with:
python-version: '3.9'
- name: set pip cache dir
shell: bash
run: echo "PIP_CACHE_DIR=$(pip cache dir)" >> $GITHUB_ENV
- name: extract pip cache
uses: actions/cache@v3
with:
path: ${{ env.PIP_CACHE_DIR }}
key: ${{ runner.os }}-pip-${{ hashFiles('setup.py') }}
restore-keys: ${{ runner.os }}-pip-
- id: os-name
uses: ASzc/change-string-case-action@v5
with:
string: ${{ runner.os }}
- run: python -m pip install --user --upgrade pip wheel
- if: startsWith(runner.os, 'linux')
run: pip install -e .[test]
- if: startsWith(runner.os, 'linux')
env:
HOME: /tmp
run: make test-unit-coverage
- if: startsWith(runner.os, 'linux') != true
run: pip install -e .[test]
- if: startsWith(runner.os, 'linux') != true
env:
HOME: /tmp
run: coverage run --source=lbry -m unittest tests/unit/test_conf.py
- name: submit coverage report
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
COVERALLS_FLAG_NAME: tests-unit-${{ steps.os-name.outputs.lowercase }}
COVERALLS_PARALLEL: true
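# each matrix job uploads a partial result; the coverage job below finalizes the parallel build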
run: |
pip install coveralls
coveralls --service=github
tests-integration:
name: "tests / integration"
runs-on: ubuntu-20.04
strategy:
matrix:
test:
- datanetwork
- blockchain
- claims
- takeovers
- transactions
- other
steps:
- name: Configure sysctl limits
run: |
sudo swapoff -a
sudo sysctl -w vm.swappiness=1
sudo sysctl -w fs.file-max=262144
sudo sysctl -w vm.max_map_count=262144
- name: Runs Elasticsearch
uses: elastic/elastic-github-actions/elasticsearch@master
with:
stack-version: 7.12.1
- uses: actions/checkout@v3
- uses: actions/setup-python@v4
with:
python-version: '3.9'
- if: matrix.test == 'other'
run: |
sudo apt-get update
sudo apt-get install -y --no-install-recommends ffmpeg
- name: extract pip cache
uses: actions/cache@v3
with:
path: ./.tox
key: tox-integration-${{ matrix.test }}-${{ hashFiles('setup.py') }}
restore-keys: txo-integration-${{ matrix.test }}-
- run: pip install tox coverage coveralls
- if: matrix.test == 'claims'
run: rm -rf .tox
- run: tox -e ${{ matrix.test }}
- name: submit coverage report
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
COVERALLS_FLAG_NAME: tests-integration-${{ matrix.test }}
COVERALLS_PARALLEL: true
run: |
coverage combine tests
coveralls --service=github
coverage:
needs: ["tests-unit", "tests-integration"]
runs-on: ubuntu-20.04
steps:
- name: finalize coverage report submission
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
run: |
pip install coveralls
coveralls --service=github --finish
build:
needs: ["lint", "tests-unit", "tests-integration"]
name: "build / binary"
strategy:
matrix:
os:
- ubuntu-20.04
- macos-latest
- windows-latest
runs-on: ${{ matrix.os }}
steps:
- uses: actions/checkout@v3
- uses: actions/setup-python@v4
with:
python-version: '3.9'
- id: os-name
uses: ASzc/change-string-case-action@v5
with:
string: ${{ runner.os }}
- name: set pip cache dir
shell: bash
run: echo "PIP_CACHE_DIR=$(pip cache dir)" >> $GITHUB_ENV
- name: extract pip cache
uses: actions/cache@v3
with:
path: ${{ env.PIP_CACHE_DIR }}
key: ${{ runner.os }}-pip-${{ hashFiles('setup.py') }}
restore-keys: ${{ runner.os }}-pip-
- run: pip install pyinstaller==4.6
- run: pip install -e .
- if: startsWith(github.ref, 'refs/tags/v')
run: python docker/set_build.py
- if: startsWith(runner.os, 'linux') || startsWith(runner.os, 'mac')
name: Build & Run (Unix)
run: |
pyinstaller --onefile --name lbrynet lbry/extras/cli.py
dist/lbrynet --version
- if: startsWith(runner.os, 'windows')
name: Build & Run (Windows)
run: |
pip install pywin32==301
pyinstaller --additional-hooks-dir=scripts/. --icon=icons/lbry256.ico --onefile --name lbrynet lbry/extras/cli.py
dist/lbrynet.exe --version
- uses: actions/upload-artifact@v3
with:
name: lbrynet-${{ steps.os-name.outputs.lowercase }}
path: dist/
release:
name: "release"
if: startsWith(github.ref, 'refs/tags/v')
needs: ["build"]
runs-on: ubuntu-20.04
steps:
- uses: actions/checkout@v1
- uses: actions/download-artifact@v2
- name: upload binaries
env:
GITHUB_TOKEN: ${{ secrets.RELEASE_API_TOKEN }}
run: |
pip install githubrelease
chmod +x lbrynet-macos/lbrynet
chmod +x lbrynet-linux/lbrynet
zip --junk-paths lbrynet-mac.zip lbrynet-macos/lbrynet
zip --junk-paths lbrynet-linux.zip lbrynet-linux/lbrynet
zip --junk-paths lbrynet-windows.zip lbrynet-windows/lbrynet.exe
ls -lh
githubrelease release lbryio/lbry-sdk info ${GITHUB_REF#refs/tags/}
githubrelease asset lbryio/lbry-sdk upload ${GITHUB_REF#refs/tags/} \
lbrynet-mac.zip lbrynet-linux.zip lbrynet-windows.zip
githubrelease release lbryio/lbry-sdk publish ${GITHUB_REF#refs/tags/}

@@ -1,22 +0,0 @@
name: slack
on:
release:
types: [published]
jobs:
release:
name: "slack notification"
runs-on: ubuntu-20.04
steps:
- uses: LoveToKnow/slackify-markdown-action@v1.0.0
id: markdown
with:
text: "There is a new SDK release: ${{github.event.release.html_url}}\n${{ github.event.release.body }}"
- uses: slackapi/slack-github-action@v1.14.0
env:
CHANGELOG: '<!channel> ${{ steps.markdown.outputs.text }}'
SLACK_WEBHOOK_URL: ${{ secrets.SLACK_RELEASE_BOT_WEBHOOK }}
with:
payload: '{"type": "mrkdwn", "text": ${{ toJSON(env.CHANGELOG) }} }'

.gitignore vendored

@@ -1,22 +1,2 @@
/.idea
/.DS_Store
/build
/dist
/.tox
/.coverage*
/lbry-venv
/venv
/lbry/blockchain
lbry.egg-info
__pycache__
_trial_temp/
trending*.log
/tests/integration/claims/files
/tests/.coverage.*
/lbry/wallet/bin
/.vscode
/.gitignore

.travis.yml Normal file

@@ -0,0 +1,146 @@
sudo: required
dist: xenial
language: python
python: "3.7"
jobs:
include:
- stage: code quality
name: "pylint & mypy"
install:
- make install
script: make lint
- stage: test
name: "LBRY Unit Tests"
install:
- make install
script:
- cd lbry && HOME=/tmp coverage run -p --source=lbry -m unittest discover -vv tests.unit
after_success:
- coverage combine lbry/
- bash <(curl -s https://codecov.io/bash)
- name: "LBRY Integration Tests"
install:
- pip install coverage tox-travis
- sudo mount -o mode=1777,nosuid,nodev -t tmpfs tmpfs /tmp
script: cd lbry && tox
after_success:
- coverage combine lbry
- bash <(curl -s https://codecov.io/bash)
- &torba-tests
name: "Torba Unit Tests"
env: TESTTYPE=unit
install:
- pip install coverage tox-travis
script: cd torba && tox
after_success:
- coverage combine torba/tests
- bash <(curl -s https://codecov.io/bash)
- <<: *torba-tests
name: "Torba Integration Tests"
env: TESTTYPE=integration
- name: "Run Examples"
install:
- make install
script:
- cd lbry && HOME=/tmp coverage run -p --source=lbry scripts/generate_json_api.py
after_success:
- coverage combine lbry
- bash <(curl -s https://codecov.io/bash)
- &build
stage: build
name: "Linux"
env: OS=linux
install:
- pip install pyinstaller awscli
- cd torba && pip install -e . && cd ..
- cd lbry
- python scripts/set_build.py
- pip install -e .
script:
- pyinstaller -F -n lbrynet lbry/extras/cli.py
- cd dist
- chmod +x lbrynet
- zip -j lbrynet-${OS}.zip lbrynet
- shasum -a 256 -b lbrynet-${OS}.zip
- ./lbrynet --version
after_success:
- aws configure set aws_access_key_id $ARTIFACTS_KEY
- aws configure set aws_secret_access_key $ARTIFACTS_SECRET
- aws configure set region us-east-1
- export S3_PATH="daemon/build-${TRAVIS_BUILD_NUMBER}_commit-${TRAVIS_COMMIT:0:7}_branch-${TRAVIS_BRANCH}$([ ! -z ${TRAVIS_TAG} ] && echo _tag-${TRAVIS_TAG})"
- aws s3 cp lbrynet-${OS}.zip s3://build.lbry.io/${S3_PATH}/lbrynet-${OS}.zip
deploy:
provider: releases
api_key:
secure: "unnR+aSJ1937Cl1PyBBZzGuZvV5W5TGcXELhXTgyOeeI6FgO/j80qmbNxJDA7qdFH/hvVicQFWoflhZu2dxN5rYP5BQJW3q3XoOLY3XAc1s1vicFkwqn3TIfdFiJTz+/D9eBUBBhHKeYFxm3M+thvklTLgjKl6fflh14NfGuNTevK9yQke8wewW3f9UmFTo1qNOPF1OsTZRbwua6oQYa59P+KukoPt4Dsu1VtILtTkj7hfEsUL79cjotwO3gkhYftxbl/xeDSZWOt+9Nhb8ZKmQG/uDx4JiTMm5lWRk4QB7pUujZ1CftxCYWz/lJx9nuJpdCOgP624tcHymErNlD+vGLwMTNslcXGYkAJH6xvGyxBJ+Obc8vRVnZbRM26BfH34TcPK1ueRxHSrDUbzMIIUsgcoZAxBuim8uDPp+K7bGqiygzSs2vQfr9U5Jhe9/F8sPdtNctfJZEfgmthNTeVFjyNsGIfIt754uGSfACqM7wDLh6fbKx7M+FHlNyOdvYCrbKUOAYXmTikYIpVDvlaaeMO+N+uW8Rhvm1j+JU7CVwhMavLySaPVc6Dt5OxiMMmxw9mVrjW9bBPjS5AkrS5MOA13T5wapoLzH6+gE92U4HzA6ilMcwRaQPSFnK2JU7tzyt2Wy1PH4MjHowXI2WyICG1x510dD3tX1P/1px8ro="
file: lbrynet-${OS}.zip
skip_cleanup: true
overwrite: true
draft: true
on:
tags: true
- <<: *build
name: "Mac"
os: osx
osx_image: xcode8.3
language: shell
env: OS=mac
before_install:
- brew upgrade python || true
- pip3 install --user --upgrade pip virtualenv
- /Users/travis/Library/Python/3.7/bin/virtualenv $HOME/venv
- source $HOME/venv/bin/activate
before_cache:
- brew cleanup
- <<: *build
name: "Windows"
os: windows
language: shell
env:
- OS=windows
- PATH=/c/Python37:/c/Python37/Scripts:/C/Windows/System32/downlevel:$PATH
before_install:
- choco install python --x86
- python -m pip install --upgrade pip
- pip install pywin32
script:
- pyinstaller --additional-hooks-dir=scripts/. --icon=icons/lbry256.ico -F -n lbrynet lbry/extras/cli.py
- cd dist
- 7z a -tzip lbrynet-windows.zip lbrynet.exe
- sha256sum -b lbrynet-windows.zip
- ./lbrynet.exe --version
- if: tag IS present
stage: build
name: "Wallet Server Docker Image - Tagged Release"
script:
- set -e
- echo "$DOCKER_PASSWORD" | docker login --username "$DOCKER_USERNAME" --password-stdin
- travis_retry docker build -t lbry/wallet-server:$TRAVIS_TAG -f lbry/scripts/Dockerfile.wallet_server .
- docker push lbry/wallet-server:$TRAVIS_TAG
- if: tag IS blank AND branch = master AND NOT type IN (pull_request)
stage: build
name: "Wallet Server Docker Image - Master"
script:
- set -e
- echo "$DOCKER_PASSWORD" | docker login --username "$DOCKER_USERNAME" --password-stdin
- travis_retry docker build -t lbry/wallet-server:master -f lbry/scripts/Dockerfile.wallet_server .
- docker push lbry/wallet-server:master
cache:
directories:
- $HOME/venv
- $HOME/.cache/pip
- $HOME/Library/Caches/pip
- $HOME/Library/Caches/Homebrew
- $TRAVIS_BUILD_DIR/.tox

File diff suppressed because it is too large.

@@ -1,6 +1,6 @@
# Installing LBRY
If only the JSON-RPC API server is needed, the recommended way to install LBRY is to use a pre-built binary. We provide binaries for all major operating systems. See the [README](README.md)!
If only the JSON-RPC API server is needed, the recommended way to install LBRY is to use a pre-built binary. We provide binaries for all major operating systems. See the [README](README.md).
These instructions are for installing LBRY from source, which is recommended if you are interested in doing development work or LBRY is not available on your operating system (godspeed, TempleOS users).
@@ -9,48 +9,36 @@ Here's a video walkthrough of this setup, which is itself hosted by the LBRY net
## Prerequisites
Running `lbrynet` from source requires Python 3.7. Get the installer for your OS [here](https://www.python.org/downloads/release/python-370/).
Running `lbrynet` from source requires Python 3.7 or higher. Get the installer for your OS [here](https://www.python.org/downloads/release/python-370/).
After installing Python 3.7, you'll need to install some additional libraries depending on your operating system.
Because of [issue #2769](https://github.com/lbryio/lbry-sdk/issues/2769)
at the moment the `lbrynet` daemon will only work correctly with Python 3.7.
If Python 3.8+ is used, the daemon will start but the RPC server
may not accept messages, returning the following:
```
Could not connect to daemon. Are you sure it's running?
```
After installing Python 3, you'll need to install some additional libraries depending on your operating system.
### macOS
macOS users will need to install [xcode command line tools](https://developer.xamarin.com/guides/testcloud/calabash/configuring/osx/install-xcode-command-line-tools/) and [homebrew](http://brew.sh/).
These environment variables also need to be set:
```
PYTHONUNBUFFERED=1
EVENT_NOKQUEUE=1
```
These environment variables also need to be set:
1. PYTHONUNBUFFERED=1
2. EVENT_NOKQUEUE=1
Remaining dependencies can then be installed by running:
```
brew install python protobuf
```
Assistance installing Python3: https://docs.python-guide.org/starting/install3/osx/.
Assistance installing Python3: https://docs.python-guide.org/starting/install3/osx/
### Linux
On Ubuntu (we recommend 18.04 or 20.04), install the following:
On Ubuntu (16.04 minimum, we recommend 18.04), install the following:
```
sudo add-apt-repository ppa:deadsnakes/ppa
sudo apt-get update
sudo apt-get install build-essential python3.7 python3.7-dev git python3.7-venv libssl-dev python-protobuf
```
The [deadsnakes PPA](https://launchpad.net/~deadsnakes/+archive/ubuntu/ppa) provides Python 3.7
for those Ubuntu distributions that no longer have it in their
official repositories.
On Raspbian, you will also need to install `python-pyparsing`.
If you're running another Linux distro, install the equivalent of the above packages for your system.
@@ -59,119 +47,65 @@ If you're running another Linux distro, install the equivalent of the above pack
### Linux/Mac
Clone the repository:
```bash
git clone https://github.com/lbryio/lbry-sdk.git
cd lbry-sdk
```
To install on Linux/Mac:
Create a Python virtual environment for lbry-sdk:
```bash
python3.7 -m venv lbry-venv
```
```
Clone the repository:
$ git clone https://github.com/lbryio/lbry-sdk.git
$ cd lbry-sdk
Activate virtual environment:
```bash
source lbry-venv/bin/activate
```
Create a Python virtual environment for lbry-sdk:
$ python3.7 -m venv lbry-venv
Make sure you're on Python 3.7+ as default in the virtual environment:
```bash
python --version
```
Activating lbry-sdk virtual environment:
$ source lbry-venv/bin/activate
Install packages:
```bash
make install
```
Make sure you're on Python 3.7+ (as the default Python in virtual environment):
$ python --version
If you are on Linux and using PyCharm, generate the initial configs:
```bash
make idea
```
Install packages:
$ make install
To verify your installation, `which lbrynet` should return a path inside
of the `lbry-venv` folder.
```bash
(lbry-venv) $ which lbrynet
/opt/lbry-sdk/lbry-venv/bin/lbrynet
```
If you are on Linux and using PyCharm, generate the initial configs:
$ make idea
```
To exit the virtual environment simply use the command `deactivate`.
To verify your installation, `which lbrynet` should return a path inside of the `lbry-venv` folder created by the `python3.7 -m venv lbry-venv` command.
### Windows
Clone the repository:
```bash
git clone https://github.com/lbryio/lbry-sdk.git
cd lbry-sdk
```
To install on Windows:
Create a Python virtual environment for lbry-sdk:
```bash
python -m venv lbry-venv
```
```
Clone the repository:
> git clone https://github.com/lbryio/lbry-sdk.git
> cd lbry-sdk
Activate virtual environment:
```bash
lbry-venv\Scripts\activate
```
Create a Python virtual environment for lbry-sdk:
> python -m venv lbry-venv
Install packages:
```bash
pip install -e .
```
Activating lbry-sdk virtual environment:
> lbry-venv\Scripts\activate
Install packages:
> cd torba
> pip install -e .
> cd ../lbry
> pip install -e .
```
## Run the tests
### Elasticsearch
For running integration tests, Elasticsearch is required to be available at localhost:9200/
To run the unit tests from the repo directory:
The easiest way to start it is using docker with:
```bash
make elastic-docker
```
Alternative installation methods are available [at Elasticsearch website](https://www.elastic.co/guide/en/elasticsearch/reference/current/install-elasticsearch.html).
To run the unit and integration tests from the repo directory:
```
python -m unittest discover tests.unit
python -m unittest discover tests.integration
```
```
python -m unittest discover -s lbry tests.unit
```
## Usage
To start the API server:
```
lbrynet start
```
`lbrynet start`
Whenever the code inside [lbry-sdk/lbry](./lbry)
is modified, run `make install` to update the `lbrynet`
executable with the newest code.
## Development
When developing, remember to enter the virtual environment,
and, if you wish, start the server interactively.
```bash
$ source lbry-venv/bin/activate
(lbry-venv) $ python lbry/extras/cli.py start
```
Parameters can be passed in the same way.
```bash
(lbry-venv) $ python lbry/extras/cli.py wallet balance
```
If a Python debugger (`pdb` or `ipdb`) is installed, we can also start the daemon
under it, set breakpoints, and step through the code.
```bash
(lbry-venv) $ pip install ipdb
(lbry-venv) $ ipdb lbry/extras/cli.py
```
Happy hacking!

@@ -1,6 +1,6 @@
The MIT License (MIT)
Copyright (c) 2015-2022 LBRY Inc
Copyright (c) 2015-2019 LBRY Inc
Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the
"Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish,

@@ -1,26 +1,18 @@
.PHONY: install tools lint test test-unit test-unit-coverage test-integration idea
install:
pip install -e .
cd torba && pip install -e .
cd lbry && pip install -e .
pip install mypy==0.701
pip install coverage astroid pylint
lint:
pylint --rcfile=setup.cfg lbry
#mypy --ignore-missing-imports lbry
cd lbry && pylint lbry
cd torba && pylint --rcfile=setup.cfg torba
cd torba && mypy --ignore-missing-imports torba
test: test-unit test-integration
test-unit:
python -m unittest discover tests.unit
test-unit-coverage:
coverage run --source=lbry -m unittest discover -vv tests.unit
test-integration:
tox
test:
cd lbry && tox
cd torba && tox
idea:
mkdir -p .idea
cp -r scripts/idea/* .idea
elastic-docker:
docker run -d -v lbryhub:/usr/share/elasticsearch/data -p 9200:9200 -p 9300:9300 -e"ES_JAVA_OPTS=-Xms512m -Xmx512m" -e "discovery.type=single-node" docker.elastic.co/elasticsearch/elasticsearch:7.12.1
cp -r lbry/scripts/idea/* .idea

@@ -1,15 +1,15 @@
# <img src="https://raw.githubusercontent.com/lbryio/lbry-sdk/master/lbry.png" alt="LBRY" width="48" height="36" /> LBRY SDK [![build](https://github.com/lbryio/lbry-sdk/actions/workflows/main.yml/badge.svg)](https://github.com/lbryio/lbry-sdk/actions/workflows/main.yml) [![coverage](https://coveralls.io/repos/github/lbryio/lbry-sdk/badge.svg)](https://coveralls.io/github/lbryio/lbry-sdk)
# <img src="https://raw.githubusercontent.com/lbryio/lbry-sdk/master/lbry/lbry.png" alt="LBRY" width="48" height="36" /> LBRY SDK [![Build Status](https://travis-ci.org/lbryio/lbry-sdk.svg?branch=master)](https://travis-ci.org/lbryio/lbry-sdk) [![Test Coverage](https://codecov.io/gh/lbryio/lbry-sdk/branch/master/graph/badge.svg)](https://codecov.io/gh/lbryio/lbry-sdk)
LBRY is a decentralized peer-to-peer protocol for publishing and accessing digital content. It utilizes the [LBRY blockchain](https://github.com/lbryio/lbrycrd) as a global namespace and database of digital content. Blockchain entries contain searchable content metadata, identities, rights and access rules. LBRY also provides a data network that consists of peers (seeders) uploading and downloading data from other peers, possibly in exchange for payments, as well as a distributed hash table used by peers to discover other peers.
LBRY SDK for Python is currently the most fully featured implementation of the LBRY Network protocols and includes many useful components and tools for building decentralized applications. Primary features and components include:
LBRY SDK for Python is currently the most full featured implementation of the LBRY Network protocols and includes many useful components and tools for building decentralized applications. Primary features and components:
* Built on Python 3.7 and `asyncio`.
* Kademlia DHT (Distributed Hash Table) implementation for finding peers to download from and announcing to peers what we have to host ([lbry.dht](https://github.com/lbryio/lbry-sdk/tree/master/lbry/dht)).
* Blob exchange protocol for transferring encrypted blobs of content and negotiating payments ([lbry.blob_exchange](https://github.com/lbryio/lbry-sdk/tree/master/lbry/blob_exchange)).
* Protobuf schema for encoding and decoding metadata stored on the blockchain ([lbry.schema](https://github.com/lbryio/lbry-sdk/tree/master/lbry/schema)).
* Wallet implementation for the LBRY blockchain ([lbry.wallet](https://github.com/lbryio/lbry-sdk/tree/master/lbry/wallet)).
* Daemon with a JSON-RPC API to ease building end user applications in any language and for automating various tasks ([lbry.extras.daemon](https://github.com/lbryio/lbry-sdk/tree/master/lbry/extras/daemon)).
* Built on Python 3.7+ and `asyncio`.
* Kademlia DHT (Distributed Hash Table) implementation for finding peers to download from and announcing to peers what we have to host ([lbry.dht](https://github.com/lbryio/lbry-sdk/tree/master/lbry/lbry/dht)).
* Blob exchange protocol for transferring encrypted blobs of content and negotiating payments ([lbry.blob_exchange](https://github.com/lbryio/lbry-sdk/tree/master/lbry/lbry/blob_exchange)).
* Protobuf schema for encoding and decoding metadata stored on the blockchain ([lbry.schema](https://github.com/lbryio/lbry-sdk/tree/master/lbry/lbry/schema)).
* Wallet implementation for the LBRY blockchain ([lbry.wallet](https://github.com/lbryio/lbry-sdk/tree/master/lbry/lbry/wallet)).
* Daemon with a JSON-RPC API to ease building end user applications in any language and for automating various tasks ([lbry.extras.daemon](https://github.com/lbryio/lbry-sdk/tree/master/lbry/lbry/extras/daemon)).
## Installation
@@ -41,7 +41,7 @@ This project is MIT licensed. For the full license, see [LICENSE](LICENSE).
## Security
We take security seriously. Please contact security@lbry.com regarding any security issues. [Our PGP key is here](https://lbry.com/faq/pgp-key) if you need it.
We take security seriously. Please contact security@lbry.com regarding any security issues. [Our GPG key is here](https://lbry.com/faq/gpg-key) if you need it.
## Contact
@@ -53,4 +53,4 @@ The documentation for the API can be found [here](https://lbry.tech/api/sdk).
Daemon defaults, ports, and other settings are documented [here](https://lbry.tech/resources/daemon-settings).
Settings can be configured using a daemon-settings.yml file. An example can be found [here](https://github.com/lbryio/lbry-sdk/blob/master/example_daemon_settings.yml).
Settings can be configured using a daemon-settings.yml file. An example can be found [here](https://github.com/lbryio/lbry-sdk/blob/master/lbry/example_daemon_settings.yml).

@@ -1,9 +0,0 @@
# Security Policy
## Supported Versions
While we are not at v1.0 yet, only the latest release will be supported.
## Reporting a Vulnerability
See https://lbry.com/faq/security

@@ -1,43 +0,0 @@
FROM debian:10-slim
ARG user=lbry
ARG projects_dir=/home/$user
ARG db_dir=/database
ARG DOCKER_TAG
ARG DOCKER_COMMIT=docker
ENV DOCKER_TAG=$DOCKER_TAG DOCKER_COMMIT=$DOCKER_COMMIT
RUN apt-get update && \
apt-get -y --no-install-recommends install \
wget \
automake libtool \
tar unzip \
build-essential \
pkg-config \
libleveldb-dev \
python3.7 \
python3-dev \
python3-pip \
python3-wheel \
python3-setuptools && \
update-alternatives --install /usr/bin/pip pip /usr/bin/pip3 1 && \
rm -rf /var/lib/apt/lists/*
RUN groupadd -g 999 $user && useradd -m -u 999 -g $user $user
COPY . $projects_dir
RUN chown -R $user:$user $projects_dir
RUN mkdir -p $db_dir
RUN chown -R $user:$user $db_dir
USER $user
WORKDIR $projects_dir
RUN python3 -m pip install -U setuptools pip
RUN make install
RUN python3 docker/set_build.py
RUN rm ~/.cache -rf
VOLUME $db_dir
ENTRYPOINT ["python3", "scripts/dht_node.py"]

@@ -1,56 +0,0 @@
FROM debian:10-slim
ARG user=lbry
ARG db_dir=/database
ARG projects_dir=/home/$user
ARG DOCKER_TAG
ARG DOCKER_COMMIT=docker
ENV DOCKER_TAG=$DOCKER_TAG DOCKER_COMMIT=$DOCKER_COMMIT
RUN apt-get update && \
apt-get -y --no-install-recommends install \
wget \
tar unzip \
build-essential \
automake libtool \
pkg-config \
libleveldb-dev \
python3.7 \
python3-dev \
python3-pip \
python3-wheel \
python3-cffi \
python3-setuptools && \
update-alternatives --install /usr/bin/pip pip /usr/bin/pip3 1 && \
rm -rf /var/lib/apt/lists/*
RUN groupadd -g 999 $user && useradd -m -u 999 -g $user $user
RUN mkdir -p $db_dir
RUN chown -R $user:$user $db_dir
COPY . $projects_dir
RUN chown -R $user:$user $projects_dir
USER $user
WORKDIR $projects_dir
RUN pip install uvloop
RUN make install
RUN python3 docker/set_build.py
RUN rm ~/.cache -rf
# entry point
ARG host=0.0.0.0
ARG tcp_port=50001
ARG daemon_url=http://lbry:lbry@localhost:9245/
VOLUME $db_dir
ENV TCP_PORT=$tcp_port
ENV HOST=$host
ENV DAEMON_URL=$daemon_url
ENV DB_DIRECTORY=$db_dir
ENV MAX_SESSIONS=1000000000
ENV MAX_SEND=1000000000000000000
ENV EVENT_LOOP_POLICY=uvloop
COPY ./docker/wallet_server_entrypoint.sh /entrypoint.sh
ENTRYPOINT ["/entrypoint.sh"]

@@ -1,45 +0,0 @@
FROM debian:10-slim
ARG user=lbry
ARG downloads_dir=/database
ARG projects_dir=/home/$user
ARG DOCKER_TAG
ARG DOCKER_COMMIT=docker
ENV DOCKER_TAG=$DOCKER_TAG DOCKER_COMMIT=$DOCKER_COMMIT
RUN apt-get update && \
apt-get -y --no-install-recommends install \
wget \
automake libtool \
tar unzip \
build-essential \
pkg-config \
libleveldb-dev \
python3.7 \
python3-dev \
python3-pip \
python3-wheel \
python3-setuptools && \
update-alternatives --install /usr/bin/pip pip /usr/bin/pip3 1 && \
rm -rf /var/lib/apt/lists/*
RUN groupadd -g 999 $user && useradd -m -u 999 -g $user $user
RUN mkdir -p $downloads_dir
RUN chown -R $user:$user $downloads_dir
COPY . $projects_dir
RUN chown -R $user:$user $projects_dir
USER $user
WORKDIR $projects_dir
RUN pip install uvloop
RUN make install
RUN python3 docker/set_build.py
RUN rm ~/.cache -rf
# entry point
VOLUME $downloads_dir
COPY ./docker/webconf.yaml /webconf.yaml
ENTRYPOINT ["/home/lbry/.local/bin/lbrynet", "start", "--config=/webconf.yaml"]

@@ -1,9 +0,0 @@
### How to run with docker-compose
1. Edit the config file, then fix its permissions with:
```
sudo chown -R 999:999 webconf.yaml
```
2. Start the SDK with:
```
docker-compose up -d
```

@@ -1,49 +0,0 @@
version: "3"
volumes:
wallet_server:
es01:
services:
wallet_server:
depends_on:
- es01
image: lbry/wallet-server:${WALLET_SERVER_TAG:-latest-release}
restart: always
network_mode: host
ports:
- "50001:50001" # rpc port
- "2112:2112" # uncomment to enable prometheus
volumes:
- "wallet_server:/database"
environment:
- DAEMON_URL=http://lbry:lbry@127.0.0.1:9245
- MAX_QUERY_WORKERS=4
- CACHE_MB=1024
- CACHE_ALL_TX_HASHES=
- CACHE_ALL_CLAIM_TXOS=
- MAX_SEND=1000000000000000000
- MAX_RECEIVE=1000000000000000000
- MAX_SESSIONS=100000
- HOST=0.0.0.0
- TCP_PORT=50001
- PROMETHEUS_PORT=2112
- FILTERING_CHANNEL_IDS=770bd7ecba84fd2f7607fb15aedd2b172c2e153f 95e5db68a3101df19763f3a5182e4b12ba393ee8
- BLOCKING_CHANNEL_IDS=dd687b357950f6f271999971f43c785e8067c3a9 06871aa438032244202840ec59a469b303257cad b4a2528f436eca1bf3bf3e10ff3f98c57bd6c4c6
es01:
image: docker.elastic.co/elasticsearch/elasticsearch:7.11.0
container_name: es01
environment:
- node.name=es01
- discovery.type=single-node
- indices.query.bool.max_clause_count=8192
- bootstrap.memory_lock=true
- "ES_JAVA_OPTS=-Xms4g -Xmx4g" # no more than 32, remember to disable swap
ulimits:
memlock:
soft: -1
hard: -1
volumes:
- es01:/usr/share/elasticsearch/data
ports:
- 127.0.0.1:9200:9200

@@ -1,9 +0,0 @@
version: '3'
services:
websdk:
image: vshyba/websdk
ports:
- '5279:5279'
- '5280:5280'
volumes:
- ./webconf.yaml:/webconf.yaml

@@ -1,7 +0,0 @@
#!/bin/bash
DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )"
cd "$DIR/../.." ## make sure we're in the right place. Docker Hub screws this up sometimes
echo "docker build dir: $(pwd)"
docker build --build-arg DOCKER_TAG=$DOCKER_TAG --build-arg DOCKER_COMMIT=$SOURCE_COMMIT -f $DOCKERFILE_PATH -t $IMAGE_NAME .

@@ -1,11 +0,0 @@
# requires powershell and .NET 4+. see https://chocolatey.org/install for more info.
$chocoVersion = powershell choco -v
if(-not($chocoVersion)){
Write-Output "Chocolatey is not installed, installing now"
Write-Output "IF YOU KEEP GETTING THIS MESSAGE ON EVERY BUILD, TRY RESTARTING THE GITLAB RUNNER SO IT GETS CHOCO INTO IT'S ENV"
Set-ExecutionPolicy Bypass -Scope Process -Force; iex ((New-Object System.Net.WebClient).DownloadString('https://chocolatey.org/install.ps1'))
}
else{
Write-Output "Chocolatey version $chocoVersion is already installed"
}

@@ -1,44 +0,0 @@
import sys
import os
import re
import logging
import lbry.build_info as build_info_mod
log = logging.getLogger()
log.addHandler(logging.StreamHandler())
log.setLevel(logging.DEBUG)
def _check_and_set(d: dict, key: str, value: str):
try:
d[key]
except KeyError:
raise Exception(f"{key} var does not exist in {build_info_mod.__file__}")
d[key] = value
def main():
build_info = {item: build_info_mod.__dict__[item] for item in dir(build_info_mod) if not item.startswith("__")}
commit_hash = os.getenv('DOCKER_COMMIT', os.getenv('GITHUB_SHA'))
if commit_hash is None:
raise ValueError("Commit hash not found in env vars")
_check_and_set(build_info, "COMMIT_HASH", commit_hash[:6])
docker_tag = os.getenv('DOCKER_TAG')
if docker_tag:
_check_and_set(build_info, "DOCKER_TAG", docker_tag)
_check_and_set(build_info, "BUILD", "docker")
else:
if re.match(r'refs/tags/v\d+\.\d+\.\d+$', str(os.getenv('GITHUB_REF'))):
_check_and_set(build_info, "BUILD", "release")
else:
_check_and_set(build_info, "BUILD", "qa")
log.debug("build info: %s", ", ".join([f"{k}={v}" for k, v in build_info.items()]))
with open(build_info_mod.__file__, 'w') as f:
f.write("\n".join([f"{k} = \"{v}\"" for k, v in build_info.items()]) + "\n")
if __name__ == '__main__':
sys.exit(main())
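For reference, the script above rewrites `lbry/build_info.py` in place; a sketch of the resulting module after a tagged release build, with illustrative values:

```python
# lbry/build_info.py as rewritten by set_build.py (illustrative values)
BUILD = "release"       # "docker" when DOCKER_TAG is set, "qa" for untagged builds
COMMIT_HASH = "a39471"  # first 6 characters of DOCKER_COMMIT or GITHUB_SHA
```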

@@ -1,25 +0,0 @@
#!/bin/bash
# entrypoint for wallet server Docker image
set -euo pipefail
SNAPSHOT_URL="${SNAPSHOT_URL:-}" #off by default. latest snapshot at https://lbry.com/snapshot/wallet
if [[ -n "$SNAPSHOT_URL" ]] && [[ ! -f /database/lbry-leveldb ]]; then
files="$(ls)"
echo "Downloading wallet snapshot from $SNAPSHOT_URL"
wget --no-verbose --trust-server-names --content-disposition "$SNAPSHOT_URL"
echo "Extracting snapshot..."
filename="$(grep -vf <(echo "$files") <(ls))" # finds the file that was not there before
case "$filename" in
*.tgz|*.tar.gz|*.tar.bz2 ) tar xvf "$filename" --directory /database ;;
*.zip ) unzip "$filename" -d /database ;;
* ) echo "Don't know how to extract ${filename}. SNAPSHOT COULD NOT BE LOADED" && exit 1 ;;
esac
rm "$filename"
fi
/home/lbry/.local/bin/lbry-hub-elastic-sync
echo 'starting server'
/home/lbry/.local/bin/lbry-hub "$@"

@@ -1,9 +0,0 @@
allowed_origin: "*"
max_key_fee: "0.0 USD"
save_files: false
save_blobs: false
streaming_server: "0.0.0.0:5280"
api: "0.0.0.0:5279"
data_dir: /tmp
download_dir: /tmp
wallet_dir: /tmp

File diff suppressed because one or more lines are too long

lbry/.gitattributes vendored Normal file

@@ -0,0 +1 @@
/CHANGELOG.md merge=union

lbry/.gitignore vendored Normal file

@@ -0,0 +1,13 @@
.DS_Store
/build
/dist
/.tox
/.idea
/.coverage
/lbry-venv
lbry.egg-info
__pycache__
_trial_temp/
/tests/integration/files

lbry/.pylintrc Normal file

@@ -0,0 +1,441 @@
[MASTER]
# Specify a configuration file.
#rcfile=
# Python code to execute, usually for sys.path manipulation such as
# pygtk.require().
#init-hook=
# Add files or directories to the blacklist. They should be base names, not
# paths.
ignore=CVS,schema
# Add files or directories matching the regex patterns to the
# blacklist. The regex matches against base names, not paths.
# `\.#.*` - add emacs tmp files to the blacklist
ignore-patterns=\.#.*
# Pickle collected data for later comparisons.
persistent=yes
# List of plugins (as comma separated values of python modules names) to load,
# usually to register additional checkers.
load-plugins=
# Use multiple processes to speed up Pylint.
jobs=4
# Allow loading of arbitrary C extensions. Extensions are imported into the
# active Python interpreter and may run arbitrary code.
unsafe-load-any-extension=no
# A comma-separated list of package or module names from where C extensions may
# be loaded. Extensions are loading into the active Python interpreter and may
# run arbitrary code
# extension-pkg-whitelist=
# Allow optimization of some AST trees. This will activate a peephole AST
# optimizer, which will apply various small optimizations. For instance, it can
# be used to obtain the result of joining multiple strings with the addition
# operator. Joining a lot of strings can lead to a maximum recursion error in
# Pylint and this flag can prevent that. It has one side effect, the resulting
# AST will be different than the one from reality.
optimize-ast=no
[MESSAGES CONTROL]
# Only show warnings with the listed confidence levels. Leave empty to show
# all. Valid levels: HIGH, INFERENCE, INFERENCE_FAILURE, UNDEFINED
confidence=
# Enable the message, report, category or checker with the given id(s). You can
# either give multiple identifier separated by comma (,) or put this option
# multiple time (only on the command line, not in the configuration file where
# it should appear only once). See also the "--disable" option for examples.
#enable=
# Disable the message, report, category or checker with the given id(s). You
# can either give multiple identifiers separated by comma (,) or put this
# option multiple times (only on the command line, not in the configuration
# file where it should appear only once).You can also use "--disable=all" to
# disable everything first and then re-enable specific checks. For example, if
# you want to run only the similarities checker, you can use "--disable=all
# --enable=similarities". If you want to run only the classes checker, but have
# no Warning level messages displayed, use"--disable=all --enable=classes
# --disable=W"
disable=
anomalous-backslash-in-string,
arguments-differ,
attribute-defined-outside-init,
bad-continuation,
bare-except,
broad-except,
cell-var-from-loop,
consider-iterating-dictionary,
dangerous-default-value,
duplicate-code,
fixme,
invalid-name,
len-as-condition,
locally-disabled,
logging-not-lazy,
missing-docstring,
no-else-return,
no-init,
no-member,
no-self-use,
protected-access,
redefined-builtin,
redefined-outer-name,
redefined-variable-type,
relative-import,
signature-differs,
super-init-not-called,
too-few-public-methods,
too-many-arguments,
too-many-branches,
too-many-instance-attributes,
too-many-lines,
too-many-locals,
too-many-nested-blocks,
too-many-public-methods,
too-many-return-statements,
too-many-statements,
trailing-newlines,
undefined-loop-variable,
ungrouped-imports,
unnecessary-lambda,
unused-argument,
unused-variable,
wrong-import-order,
wrong-import-position,
deprecated-lambda,
simplifiable-if-statement,
unidiomatic-typecheck,
inconsistent-return-statements,
keyword-arg-before-vararg,
assignment-from-no-return,
useless-return,
assignment-from-none,
stop-iteration-return
[REPORTS]
# Set the output format. Available formats are text, parseable, colorized, msvs
# (visual studio) and html. You can also give a reporter class, eg
# mypackage.mymodule.MyReporterClass.
output-format=text
# Put messages in a separate file for each module / package specified on the
# command line instead of printing them on stdout. Reports (if any) will be
# written in a file name "pylint_global.[txt|html]".
files-output=no
# Tells whether to display a full report or only the messages
reports=no
# Python expression which should return a note less than 10 (10 is the highest
# note). You have access to the variables errors warning, statement which
# respectively contain the number of errors / warnings messages and the total
# number of statements analyzed. This is used by the global evaluation report
# (RP0004).
evaluation=10.0 - ((float(5 * error + warning + refactor + convention) / statement) * 10)
# Template used to display messages. This is a python new-style format string
# used to format the message information. See doc for all details
#msg-template=
[VARIABLES]
# Tells whether we should check for unused import in __init__ files.
init-import=no
# A regular expression matching the name of dummy variables (i.e. expectedly
# not used).
dummy-variables-rgx=_$|dummy
# List of additional names supposed to be defined in builtins. Remember that
# you should avoid to define new builtins when possible.
additional-builtins=
# List of strings which can identify a callback function by name. A callback
# name must start or end with one of those strings.
callbacks=cb_,_cb
[LOGGING]
# Logging modules to check that the string format arguments are in logging
# function parameter format
logging-modules=logging
[BASIC]
# List of builtins function names that should not be used, separated by a comma
bad-functions=map,filter,input
# Good variable names which should always be accepted, separated by a comma
# allow `d` as its used frequently for deferred callback chains
good-names=i,j,k,ex,Run,_,d
# Bad variable names which should always be refused, separated by a comma
bad-names=foo,bar,baz,toto,tutu,tata
# Colon-delimited sets of names that determine each other's naming style when
# the name regexes allow several styles.
name-group=
# Include a hint for the correct naming format with invalid-name
include-naming-hint=no
# Regular expression matching correct function names
function-rgx=[a-z_][a-z0-9_]{2,30}$
# Naming hint for function names
function-name-hint=[a-z_][a-z0-9_]{2,30}$
# Regular expression matching correct variable names
variable-rgx=[a-z_][a-z0-9_]{2,30}$
# Naming hint for variable names
variable-name-hint=[a-z_][a-z0-9_]{2,30}$
# Regular expression matching correct constant names
const-rgx=(([A-Z_][A-Z0-9_]*)|(__.*__))$
# Naming hint for constant names
const-name-hint=(([A-Z_][A-Z0-9_]*)|(__.*__))$
# Regular expression matching correct attribute names
attr-rgx=[a-z_][a-z0-9_]{2,30}$
# Naming hint for attribute names
attr-name-hint=[a-z_][a-z0-9_]{2,30}$
# Regular expression matching correct argument names
argument-rgx=[a-z_][a-z0-9_]{2,30}$
# Naming hint for argument names
argument-name-hint=[a-z_][a-z0-9_]{2,30}$
# Regular expression matching correct class attribute names
class-attribute-rgx=([A-Za-z_][A-Za-z0-9_]{2,30}|(__.*__))$
# Naming hint for class attribute names
class-attribute-name-hint=([A-Za-z_][A-Za-z0-9_]{2,30}|(__.*__))$
# Regular expression matching correct inline iteration names
inlinevar-rgx=[A-Za-z_][A-Za-z0-9_]*$
# Naming hint for inline iteration names
inlinevar-name-hint=[A-Za-z_][A-Za-z0-9_]*$
# Regular expression matching correct class names
class-rgx=[A-Z_][a-zA-Z0-9]+$
# Naming hint for class names
class-name-hint=[A-Z_][a-zA-Z0-9]+$
# Regular expression matching correct module names
module-rgx=(([a-z_][a-z0-9_]*)|([A-Z][a-zA-Z0-9]+))$
# Naming hint for module names
module-name-hint=(([a-z_][a-z0-9_]*)|([A-Z][a-zA-Z0-9]+))$
# Regular expression matching correct method names
method-rgx=[a-z_][a-z0-9_]{2,30}$
# Naming hint for method names
method-name-hint=[a-z_][a-z0-9_]{2,30}$
# Regular expression which should only match function or class names that do
# not require a docstring.
no-docstring-rgx=^_
# Minimum line length for functions/classes that require docstrings, shorter
# ones are exempt.
docstring-min-length=-1
[ELIF]
# Maximum number of nested blocks for function / method body
max-nested-blocks=5
[SPELLING]
# Spelling dictionary name. Available dictionaries: none. To make it working
# install python-enchant package.
spelling-dict=
# List of comma separated words that should not be checked.
spelling-ignore-words=
# A path to a file that contains private dictionary; one word per line.
spelling-private-dict-file=
# Tells whether to store unknown words to indicated private dictionary in
# --spelling-private-dict-file option instead of raising a message.
spelling-store-unknown-words=no
[FORMAT]
# Maximum number of characters on a single line.
max-line-length=120
# Regexp for a line that is allowed to be longer than the limit.
ignore-long-lines=^\s*(# )?<?https?://\S+>?$
# Allow the body of an if to be on the same line as the test if there is no
# else.
single-line-if-stmt=no
# List of optional constructs for which whitespace checking is disabled. `dict-
# separator` is used to allow tabulation in dicts, etc.: {1 : 1,\n222: 2}.
# `trailing-comma` allows a space between comma and closing bracket: (a, ).
# `empty-line` allows space-only lines.
no-space-check=trailing-comma,dict-separator
# Maximum number of lines in a module
max-module-lines=1000
# String used as indentation unit. This is usually " " (4 spaces) or "\t" (1
# tab).
indent-string=' '
# Number of spaces of indent required inside a hanging or continued line.
indent-after-paren=4
# Expected format of line ending, e.g. empty (any line ending), LF or CRLF.
expected-line-ending-format=
[MISCELLANEOUS]
# List of note tags to take in consideration, separated by a comma.
notes=FIXME,XXX,TODO
[SIMILARITIES]
# Minimum lines number of a similarity.
min-similarity-lines=4
# Ignore comments when computing similarities.
ignore-comments=yes
# Ignore docstrings when computing similarities.
ignore-docstrings=yes
# Ignore imports when computing similarities.
ignore-imports=no
[TYPECHECK]
# Tells whether missing members accessed in mixin class should be ignored. A
# mixin class is detected if its name ends with "mixin" (case insensitive).
ignore-mixin-members=yes
# List of module names for which member attributes should not be checked
# (useful for modules/projects where namespaces are manipulated during runtime
# and thus existing member attributes cannot be deduced by static analysis. It
# supports qualified module names, as well as Unix pattern matching.
ignored-modules=leveldb,distutils
# Ignoring distutils because: https://github.com/PyCQA/pylint/issues/73
# List of classes names for which member attributes should not be checked
# (useful for classes with attributes dynamically set). This supports can work
# with qualified names.
# ignored-classes=
# List of members which are set dynamically and missed by pylint inference
# system, and so shouldn't trigger E1101 when accessed. Python regular
# expressions are accepted.
generated-members=lbrynet.lbrynet_daemon.LBRYDaemon.Parameters
[IMPORTS]
# Deprecated modules which should not be used, separated by a comma
deprecated-modules=regsub,TERMIOS,Bastion,rexec
# Create a graph of every (i.e. internal and external) dependencies in the
# given file (report RP0402 must not be disabled)
import-graph=
# Create a graph of external dependencies in the given file (report RP0402 must
# not be disabled)
ext-import-graph=
# Create a graph of internal dependencies in the given file (report RP0402 must
# not be disabled)
int-import-graph=
[DESIGN]
# Maximum number of arguments for function / method
max-args=10
# Argument names that match this expression will be ignored. Default to name
# with leading underscore
ignored-argument-names=_.*
# Maximum number of locals for function / method body
max-locals=15
# Maximum number of return / yield for function / method body
max-returns=6
# Maximum number of branch for function / method body
max-branches=12
# Maximum number of statements in function / method body
max-statements=50
# Maximum number of parents for a class (see R0901).
max-parents=8
# Maximum number of attributes for a class (see R0902).
max-attributes=7
# Minimum number of public methods for a class (see R0903).
min-public-methods=2
# Maximum number of public methods for a class (see R0904).
max-public-methods=20
# Maximum number of boolean expressions in a if statement
max-bool-expr=5
[CLASSES]
# List of method names used to declare (i.e. assign) instance attributes.
defining-attr-methods=__init__,__new__,setUp
# List of valid names for the first argument in a class method.
valid-classmethod-first-arg=cls
# List of valid names for the first argument in a metaclass class method.
valid-metaclass-classmethod-first-arg=mcs
# List of member names, which should be excluded from the protected access
# warning.
exclude-protected=_asdict,_fields,_replace,_source,_make
[EXCEPTIONS]
# Exceptions that will emit a warning when being caught. Defaults to
# "Exception"
overgeneral-exceptions=Exception

lbry/README.md Normal file

@@ -0,0 +1,56 @@
# <img src="https://raw.githubusercontent.com/lbryio/lbry-sdk/master/lbry/lbry.png" alt="LBRY" width="48" height="36" /> LBRY SDK [![Build Status](https://travis-ci.org/lbryio/lbry-sdk.svg?branch=master)](https://travis-ci.org/lbryio/lbry-sdk) [![Test Coverage](https://codecov.io/gh/lbryio/lbry-sdk/branch/master/graph/badge.svg)](https://codecov.io/gh/lbryio/lbry-sdk)
LBRY is a decentralized peer-to-peer protocol for publishing and accessing digital content. It utilizes the [LBRY blockchain](https://github.com/lbryio/lbrycrd) as a global namespace and database of digital content. Blockchain entries contain searchable content metadata, identities, rights and access rules. LBRY also provides a data network that consists of peers (seeders) uploading and downloading data from other peers, possibly in exchange for payments, as well as a distributed hash table used by peers to discover other peers.
LBRY SDK for Python is currently the most fully featured implementation of the LBRY Network protocols and includes many useful components and tools for building decentralized applications. Primary features and components include:
* Built on Python 3.7+ and `asyncio`.
* Kademlia DHT (Distributed Hash Table) implementation for finding peers to download from and announcing to peers what we have to host ([lbry.dht](https://github.com/lbryio/lbry-sdk/tree/master/lbry/lbry/dht)).
* Blob exchange protocol for transferring encrypted blobs of content and negotiating payments ([lbry.blob_exchange](https://github.com/lbryio/lbry-sdk/tree/master/lbry/lbry/blob_exchange)).
* Protobuf schema for encoding and decoding metadata stored on the blockchain ([lbry.schema](https://github.com/lbryio/lbry-sdk/tree/master/lbry/lbry/schema)).
* Wallet implementation for the LBRY blockchain ([lbry.wallet](https://github.com/lbryio/lbry-sdk/tree/master/lbry/lbry/wallet)).
* Daemon with a JSON-RPC API to ease building end user applications in any language and for automating various tasks ([lbry.extras.daemon](https://github.com/lbryio/lbry-sdk/tree/master/lbry/lbry/extras/daemon)).
## Installation
Our [releases page](https://github.com/lbryio/lbry-sdk/releases) contains pre-built binaries of the latest release, pre-releases, and past releases for macOS, Debian-based Linux, and Windows. [Automated travis builds](http://build.lbry.io/daemon/) are also available for testing.
## Usage
Run `lbrynet start` to launch the API server.
By default, `lbrynet` will provide a JSON-RPC server at `http://localhost:5279`. It is easy to interact with via cURL or sane programming languages.
Our [quickstart guide](https://lbry.tech/playground) provides a simple walkthrough and examples for learning.
With the daemon running, `lbrynet commands` will show you a list of commands.
The full API is documented [here](https://lbry.tech/api/sdk).
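As a minimal sketch (assuming a locally running daemon on the default port, and using only the Python standard library), a `status` call looks like this:

```python
import json
import urllib.request

# POST a JSON-RPC request to the local lbrynet daemon
request = urllib.request.Request(
    "http://localhost:5279",
    data=json.dumps({"method": "status", "params": {}}).encode(),
    headers={"Content-Type": "application/json"},
)
with urllib.request.urlopen(request) as response:
    print(json.loads(response.read())["result"])
```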
## Running from source
Installing from source is also relatively painless. Full instructions are in [INSTALL.md](INSTALL.md).
## Contributing
Contributions to this project are welcome, encouraged, and compensated. For more details, please check [this](https://lbry.tech/contribute) link.
## License
This project is MIT licensed. For the full license, see [LICENSE](LICENSE).
## Security
We take security seriously. Please contact security@lbry.com regarding any security issues. [Our GPG key is here](https://lbry.com/faq/gpg-key) if you need it.
## Contact
The primary contact for this project is [@eukreign](mailto:lex@lbry.com).
## Additional information and links
The documentation for the API can be found [here](https://lbry.tech/api/sdk).
Daemon defaults, ports, and other settings are documented [here](https://lbry.tech/resources/daemon-settings).
Settings can be configured using a daemon-settings.yml file. An example can be found [here](https://github.com/lbryio/lbry-sdk/blob/master/lbry/example_daemon_settings.yml).

@@ -1,2 +0,0 @@
__version__ = "0.113.0"
version = tuple(map(int, __version__.split('.'))) # pylint: disable=invalid-name

@@ -1,6 +0,0 @@
from lbry.utils import get_lbry_hash_obj
MAX_BLOB_SIZE = 2 * 2 ** 20
# digest_size is in bytes, and blob hashes are hex encoded
BLOBHASH_LENGTH = get_lbry_hash_obj().digest_size * 2

@@ -1,77 +0,0 @@
import asyncio
import logging
log = logging.getLogger(__name__)
class DiskSpaceManager:
def __init__(self, config, db, blob_manager, cleaning_interval=30 * 60, analytics=None):
self.config = config
self.db = db
self.blob_manager = blob_manager
self.cleaning_interval = cleaning_interval
self.running = False
self.task = None
self.analytics = analytics
self._used_space_bytes = None
async def get_free_space_mb(self, is_network_blob=False):
limit_mb = self.config.network_storage_limit if is_network_blob else self.config.blob_storage_limit
space_used_mb = await self.get_space_used_mb()
space_used_mb = space_used_mb['network_storage'] if is_network_blob else space_used_mb['content_storage']
return max(0, limit_mb - space_used_mb)
async def get_space_used_bytes(self):
self._used_space_bytes = await self.db.get_stored_blob_disk_usage()
return self._used_space_bytes
async def get_space_used_mb(self, cached=True):
cached = cached and self._used_space_bytes is not None
space_used_bytes = self._used_space_bytes if cached else await self.get_space_used_bytes()
return {key: int(value/1024.0/1024.0) for key, value in space_used_bytes.items()}
async def clean(self):
await self._clean(False)
await self._clean(True)
async def _clean(self, is_network_blob=False):
space_used_mb = await self.get_space_used_mb(cached=False)
if is_network_blob:
space_used_mb = space_used_mb['network_storage']
else:
space_used_mb = space_used_mb['content_storage'] + space_used_mb['private_storage']
storage_limit_mb = self.config.network_storage_limit if is_network_blob else self.config.blob_storage_limit
if self.analytics:
asyncio.create_task(
self.analytics.send_disk_space_used(space_used_mb, storage_limit_mb, is_network_blob)
)
delete = []
available = storage_limit_mb - space_used_mb
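# a blob_storage_limit of 0 disables content cleanup entirely; network blobs are cleaned only when usage exceeds network_storage_limit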
if storage_limit_mb == 0 if not is_network_blob else available >= 0:
return 0
for blob_hash, file_size, _ in await self.db.get_stored_blobs(is_mine=False, is_network_blob=is_network_blob):
delete.append(blob_hash)
available += int(file_size/1024.0/1024.0)
if available >= 0:
break
if delete:
await self.db.stop_all_files()
await self.blob_manager.delete_blobs(delete, delete_from_db=True)
self._used_space_bytes = None
return len(delete)
async def cleaning_loop(self):
while self.running:
await asyncio.sleep(self.cleaning_interval)
await self.clean()
async def start(self):
self.running = True
self.task = asyncio.create_task(self.cleaning_loop())
self.task.add_done_callback(lambda _: log.info("Stopping blob cleanup service."))
async def stop(self):
if self.running:
self.running = False
self.task.cancel()

@@ -1,2 +0,0 @@
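# amounts are integers in the smallest currency unit; as in bitcoin, COIN (one whole coin) is 10**8 base units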
CENT = 1000000
COIN = 100*CENT

@@ -1,86 +0,0 @@
from lbry.crypto.hash import double_sha256
from lbry.crypto.util import bytes_to_int, int_to_bytes
class Base58Error(Exception):
""" Exception used for Base58 errors. """
class Base58:
""" Class providing base 58 functionality. """
chars = '123456789ABCDEFGHJKLMNPQRSTUVWXYZabcdefghijkmnopqrstuvwxyz'
assert len(chars) == 58
char_map = {c: n for n, c in enumerate(chars)}
@classmethod
def char_value(cls, c):
val = cls.char_map.get(c)
if val is None:
raise Base58Error(f'invalid base 58 character "{c}"')
return val
@classmethod
def decode(cls, txt):
""" Decodes txt into a big-endian bytearray. """
if isinstance(txt, memoryview):
txt = str(txt)
if isinstance(txt, bytes):
txt = txt.decode()
if not isinstance(txt, str):
raise TypeError('a string is required')
if not txt:
raise Base58Error('string cannot be empty')
value = 0
for c in txt:
value = value * 58 + cls.char_value(c)
result = int_to_bytes(value)
# Prepend leading zero bytes if necessary
count = 0
for c in txt:
if c != '1':
break
count += 1
if count:
result = bytes((0,)) * count + result
return result
@classmethod
def encode(cls, be_bytes):
"""Converts a big-endian bytearray into a base58 string."""
value = bytes_to_int(be_bytes)
txt = ''
while value:
value, mod = divmod(value, 58)
txt += cls.chars[mod]
for byte in be_bytes:
if byte != 0:
break
txt += '1'
return txt[::-1]
@classmethod
def decode_check(cls, txt, hash_fn=double_sha256):
""" Decodes a Base58Check-encoded string to a payload. The version prefixes it. """
be_bytes = cls.decode(txt)
result, check = be_bytes[:-4], be_bytes[-4:]
if check != hash_fn(result)[:4]:
raise Base58Error(f'invalid base 58 checksum for {txt}')
return result
@classmethod
def encode_check(cls, payload, hash_fn=double_sha256):
""" Encodes a payload bytearray (which includes the version byte(s))
into a Base58Check string."""
be_bytes = payload + hash_fn(payload)[:4]
return cls.encode(be_bytes)
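A quick round-trip sketch of the class above (payload bytes illustrative):

```python
# version byte plus a zeroed 20-byte hash160, followed by a 4-byte double-SHA256 checksum
payload = b'\x00' + bytes(20)
encoded = Base58.encode_check(payload)
assert Base58.decode_check(encoded) == payload
```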

@@ -1,71 +0,0 @@
import os
import base64
import typing
from cryptography.hazmat.primitives.kdf.scrypt import Scrypt
from cryptography.hazmat.primitives.ciphers import Cipher, modes
from cryptography.hazmat.primitives.ciphers.algorithms import AES
from cryptography.hazmat.primitives.padding import PKCS7
from cryptography.hazmat.backends import default_backend
from lbry.error import InvalidPasswordError
from lbry.crypto.hash import double_sha256
def aes_encrypt(secret: str, value: str, init_vector: bytes = None) -> str:
if init_vector is not None:
assert len(init_vector) == 16
else:
init_vector = os.urandom(16)
key = double_sha256(secret.encode())
encryptor = Cipher(AES(key), modes.CBC(init_vector), default_backend()).encryptor()
padder = PKCS7(AES.block_size).padder()
padded_data = padder.update(value.encode()) + padder.finalize()
encrypted_data = encryptor.update(padded_data) + encryptor.finalize()
return base64.b64encode(init_vector + encrypted_data).decode()
def aes_decrypt(secret: str, value: str) -> typing.Tuple[str, bytes]:
try:
data = base64.b64decode(value.encode())
key = double_sha256(secret.encode())
init_vector, data = data[:16], data[16:]
decryptor = Cipher(AES(key), modes.CBC(init_vector), default_backend()).decryptor()
unpadder = PKCS7(AES.block_size).unpadder()
result = unpadder.update(decryptor.update(data)) + unpadder.finalize()
return result.decode(), init_vector
except UnicodeDecodeError:
raise InvalidPasswordError()
except ValueError as e:
if e.args[0] == 'Invalid padding bytes.':
raise InvalidPasswordError()
raise
def better_aes_encrypt(secret: str, value: bytes) -> bytes:
init_vector = os.urandom(16)
key = scrypt(secret.encode(), salt=init_vector)
encryptor = Cipher(AES(key), modes.CBC(init_vector), default_backend()).encryptor()
padder = PKCS7(AES.block_size).padder()
padded_data = padder.update(value) + padder.finalize()
encrypted_data = encryptor.update(padded_data) + encryptor.finalize()
return base64.b64encode(b's:8192:16:1:' + init_vector + encrypted_data)
def better_aes_decrypt(secret: str, value: bytes) -> bytes:
try:
data = base64.b64decode(value)
_, scryp_n, scrypt_r, scrypt_p, data = data.split(b':', maxsplit=4)
init_vector, data = data[:16], data[16:]
key = scrypt(secret.encode(), init_vector, int(scryp_n), int(scrypt_r), int(scrypt_p))
decryptor = Cipher(AES(key), modes.CBC(init_vector), default_backend()).decryptor()
unpadder = PKCS7(AES.block_size).unpadder()
return unpadder.update(decryptor.update(data)) + unpadder.finalize()
except ValueError as e:
if e.args[0] == 'Invalid padding bytes.':
raise InvalidPasswordError()
raise
def scrypt(passphrase, salt, scrypt_n=1<<13, scrypt_r=16, scrypt_p=1):
kdf = Scrypt(salt, length=32, n=scrypt_n, r=scrypt_r, p=scrypt_p, backend=default_backend())
return kdf.derive(passphrase)
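A round-trip sketch of the helpers above, with illustrative secrets:

```python
# aes_encrypt prepends the random IV and base64-encodes; aes_decrypt returns plaintext and IV
ciphertext = aes_encrypt("hunter2", "attack at dawn")
plaintext, init_vector = aes_decrypt("hunter2", ciphertext)
assert plaintext == "attack at dawn"

# the "better" variants derive the key with scrypt rather than double-SHA256
token = better_aes_encrypt("hunter2", b"wallet seed")
assert better_aes_decrypt("hunter2", token) == b"wallet seed"
```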

@@ -1,47 +0,0 @@
import hashlib
import hmac
from binascii import hexlify, unhexlify
def sha256(x):
""" Simple wrapper of hashlib sha256. """
return hashlib.sha256(x).digest()
def sha512(x):
""" Simple wrapper of hashlib sha512. """
return hashlib.sha512(x).digest()
def ripemd160(x):
""" Simple wrapper of hashlib ripemd160. """
h = hashlib.new('ripemd160')
h.update(x)
return h.digest()
def double_sha256(x):
""" SHA-256 of SHA-256, as used extensively in bitcoin. """
return sha256(sha256(x))
def hmac_sha512(key, msg):
""" Use SHA-512 to provide an HMAC. """
return hmac.new(key, msg, hashlib.sha512).digest()
def hash160(x):
""" RIPEMD-160 of SHA-256.
Used to make bitcoin addresses from pubkeys. """
return ripemd160(sha256(x))
def hash_to_hex_str(x):
    """ Convert a big-endian binary hash to a displayed hex string.
    Display form of a binary hash is reversed and converted to hex. """
    return hexlify(bytes(reversed(x)))
def hex_str_to_hash(x):
    """ Convert a displayed hex string to a binary hash. """
    return bytes(reversed(unhexlify(x)))
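
A hedged sketch of the display convention above (hashes are shown byte-reversed, so the two helpers invert each other):
h = double_sha256(b'example')
assert hex_str_to_hash(hash_to_hex_str(h)) == h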

View file

@ -1,13 +0,0 @@
from binascii import unhexlify, hexlify
def bytes_to_int(be_bytes):
""" Interprets a big-endian sequence of bytes as an integer. """
return int(hexlify(be_bytes), 16)
def int_to_bytes(value):
""" Converts an integer to a big-endian sequence of bytes. """
length = (value.bit_length() + 7) // 8
s = '%x' % value
return unhexlify(('0' * (len(s) % 2) + s).zfill(length * 2))
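
For example (a sketch, not part of this changeset; both helpers treat the byte sequence as big-endian):
assert bytes_to_int(b'\x01\x00') == 256
assert int_to_bytes(256) == b'\x01\x00'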

View file

@ -1,78 +0,0 @@
import asyncio
import typing
import logging
from prometheus_client import Counter, Gauge
if typing.TYPE_CHECKING:
from lbry.dht.node import Node
from lbry.extras.daemon.storage import SQLiteStorage
log = logging.getLogger(__name__)
class BlobAnnouncer:
announcements_sent_metric = Counter(
"announcements_sent", "Number of announcements sent and their respective status.", namespace="dht_node",
labelnames=("peers", "error"),
)
announcement_queue_size_metric = Gauge(
"announcement_queue_size", "Number of hashes waiting to be announced.", namespace="dht_node",
labelnames=("scope",)
)
def __init__(self, loop: asyncio.AbstractEventLoop, node: 'Node', storage: 'SQLiteStorage'):
self.loop = loop
self.node = node
self.storage = storage
self.announce_task: asyncio.Task = None
self.announce_queue: typing.List[str] = []
self._done = asyncio.Event()
self.announced = set()
async def _run_consumer(self):
while self.announce_queue:
try:
blob_hash = self.announce_queue.pop()
peers = len(await self.node.announce_blob(blob_hash))
self.announcements_sent_metric.labels(peers=peers, error=False).inc()
if peers > 4:
self.announced.add(blob_hash)
else:
log.debug("failed to announce %s, could only find %d peers, retrying soon.", blob_hash[:8], peers)
except Exception as err:
self.announcements_sent_metric.labels(peers=0, error=True).inc()
log.warning("error announcing %s: %s", blob_hash[:8], str(err))
async def _announce(self, batch_size: typing.Optional[int] = 10):
while batch_size:
if not self.node.joined.is_set():
await self.node.joined.wait()
await asyncio.sleep(60)
if not self.node.protocol.routing_table.get_peers():
log.warning("No peers in DHT, announce round skipped")
continue
self.announce_queue.extend(await self.storage.get_blobs_to_announce())
self.announcement_queue_size_metric.labels(scope="global").set(len(self.announce_queue))
log.debug("announcer task wake up, %d blobs to announce", len(self.announce_queue))
while len(self.announce_queue) > 0:
log.info("%i blobs to announce", len(self.announce_queue))
await asyncio.gather(*[self._run_consumer() for _ in range(batch_size)])
announced = list(filter(None, self.announced))
if announced:
await self.storage.update_last_announced_blobs(announced)
log.info("announced %i blobs", len(announced))
self.announced.clear()
self._done.set()
self._done.clear()
def start(self, batch_size: typing.Optional[int] = 10):
assert not self.announce_task or self.announce_task.done(), "already running"
self.announce_task = self.loop.create_task(self._announce(batch_size))
def stop(self):
if self.announce_task and not self.announce_task.done():
self.announce_task.cancel()
def wait(self):
return self._done.wait()
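
A hedged lifecycle sketch for the announcer above (not part of this changeset; assumes a running event loop and existing `node` and `storage` objects, and that the `await` lines run inside a coroutine):
announcer = BlobAnnouncer(loop, node, storage)
announcer.start(batch_size=10)    # spawns the _announce task
await announcer.wait()            # returns once an announce round completes
announcer.stop()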

View file

@ -1,40 +0,0 @@
import hashlib
import os
HASH_CLASS = hashlib.sha384 # pylint: disable=invalid-name
HASH_LENGTH = HASH_CLASS().digest_size
HASH_BITS = HASH_LENGTH * 8
ALPHA = 5
K = 8
SPLIT_BUCKETS_UNDER_INDEX = 1
REPLACEMENT_CACHE_SIZE = 8
RPC_TIMEOUT = 5.0
RPC_ATTEMPTS = 5
RPC_ATTEMPTS_PRUNING_WINDOW = 600
ITERATIVE_LOOKUP_DELAY = RPC_TIMEOUT / 2.0 # TODO: use config val / 2 if rpc timeout is provided
REFRESH_INTERVAL = 3600 # 1 hour
REPLICATE_INTERVAL = REFRESH_INTERVAL
DATA_EXPIRATION = 86400 # 24 hours
TOKEN_SECRET_REFRESH_INTERVAL = 300 # 5 minutes
MAYBE_PING_DELAY = 300 # 5 minutes
CHECK_REFRESH_INTERVAL = REFRESH_INTERVAL / 5
RPC_ID_LENGTH = 20
PROTOCOL_VERSION = 1
MSG_SIZE_LIMIT = 1400
def digest(data: bytes) -> bytes:
h = HASH_CLASS()
h.update(data)
return h.digest()
def generate_id(num=None) -> bytes:
if num is not None:
return digest(str(num).encode())
else:
return digest(os.urandom(32))
def generate_rpc_id(num=None) -> bytes:
return generate_id(num)[:RPC_ID_LENGTH]
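
A hedged sketch of the id helpers above (node ids are SHA-384 digests, so HASH_LENGTH is 48 bytes):
node_id = generate_id()                  # random 48-byte id
assert len(node_id) == HASH_LENGTH == 48
assert generate_rpc_id(42) == digest(b'42')[:RPC_ID_LENGTH]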

View file

@ -1,282 +0,0 @@
import logging
import asyncio
import typing
import socket
from prometheus_client import Gauge
from lbry.utils import aclosing, resolve_host
from lbry.dht import constants
from lbry.dht.peer import make_kademlia_peer
from lbry.dht.protocol.distance import Distance
from lbry.dht.protocol.iterative_find import IterativeNodeFinder, IterativeValueFinder
from lbry.dht.protocol.protocol import KademliaProtocol
if typing.TYPE_CHECKING:
    from lbry.dht.peer import PeerManager
    from lbry.dht.peer import KademliaPeer
    from lbry.extras.daemon.storage import SQLiteStorage
log = logging.getLogger(__name__)
class Node:
storing_peers_metric = Gauge(
"storing_peers", "Number of peers storing blobs announced to this node", namespace="dht_node",
labelnames=("scope",),
)
stored_blob_with_x_bytes_colliding = Gauge(
"stored_blobs_x_bytes_colliding", "Number of blobs with at least X bytes colliding with this node id prefix",
namespace="dht_node", labelnames=("amount",)
)
def __init__(self, loop: asyncio.AbstractEventLoop, peer_manager: 'PeerManager', node_id: bytes, udp_port: int,
internal_udp_port: int, peer_port: int, external_ip: str, rpc_timeout: float = constants.RPC_TIMEOUT,
split_buckets_under_index: int = constants.SPLIT_BUCKETS_UNDER_INDEX, is_bootstrap_node: bool = False,
storage: typing.Optional['SQLiteStorage'] = None):
self.loop = loop
self.internal_udp_port = internal_udp_port
self.protocol = KademliaProtocol(loop, peer_manager, node_id, external_ip, udp_port, peer_port, rpc_timeout,
split_buckets_under_index, is_bootstrap_node)
self.listening_port: asyncio.DatagramTransport = None
self.joined = asyncio.Event()
self._join_task: asyncio.Task = None
self._refresh_task: asyncio.Task = None
self._storage = storage
@property
def stored_blob_hashes(self):
return self.protocol.data_store.keys()
async def refresh_node(self, force_once=False):
while True:
# remove peers with expired blob announcements from the datastore
self.protocol.data_store.removed_expired_peers()
total_peers: typing.List['KademliaPeer'] = []
# add all peers in the routing table
total_peers.extend(self.protocol.routing_table.get_peers())
# add all the peers who have announced blobs to us
storing_peers = self.protocol.data_store.get_storing_contacts()
self.storing_peers_metric.labels("global").set(len(storing_peers))
total_peers.extend(storing_peers)
counts = {0: 0, 1: 0, 2: 0}
node_id = self.protocol.node_id
for blob_hash in self.protocol.data_store.keys():
bytes_colliding = 0 if blob_hash[0] != node_id[0] else 2 if blob_hash[1] == node_id[1] else 1
counts[bytes_colliding] += 1
self.stored_blob_with_x_bytes_colliding.labels(amount=0).set(counts[0])
self.stored_blob_with_x_bytes_colliding.labels(amount=1).set(counts[1])
self.stored_blob_with_x_bytes_colliding.labels(amount=2).set(counts[2])
# get ids falling in the midpoint of each bucket that hasn't been recently updated
node_ids = self.protocol.routing_table.get_refresh_list(0, True)
if self.protocol.routing_table.get_peers():
# if we have node ids to look up, perform the iterative search until we have k results
while node_ids:
peers = await self.peer_search(node_ids.pop())
total_peers.extend(peers)
else:
if force_once:
break
fut = asyncio.Future()
self.loop.call_later(constants.REFRESH_INTERVAL // 4, fut.set_result, None)
await fut
continue
            # ping the set of peers; on success/failure the routing table and last replied/failed times are updated
to_ping = [peer for peer in set(total_peers) if self.protocol.peer_manager.peer_is_good(peer) is not True]
if to_ping:
self.protocol.ping_queue.enqueue_maybe_ping(*to_ping, delay=0)
if self._storage:
await self._storage.save_kademlia_peers(self.protocol.routing_table.get_peers())
if force_once:
break
fut = asyncio.Future()
self.loop.call_later(constants.REFRESH_INTERVAL, fut.set_result, None)
await fut
async def announce_blob(self, blob_hash: str) -> typing.List[bytes]:
hash_value = bytes.fromhex(blob_hash)
assert len(hash_value) == constants.HASH_LENGTH
peers = await self.peer_search(hash_value)
if not self.protocol.external_ip:
raise Exception("Cannot determine external IP")
log.debug("Store to %i peers", len(peers))
for peer in peers:
log.debug("store to %s %s %s", peer.address, peer.udp_port, peer.tcp_port)
stored_to_tup = await asyncio.gather(
*(self.protocol.store_to_peer(hash_value, peer) for peer in peers)
)
stored_to = [node_id for node_id, contacted in stored_to_tup if contacted]
if stored_to:
log.debug(
"Stored %s to %i of %i attempted peers", hash_value.hex()[:8],
len(stored_to), len(peers)
)
else:
log.debug("Failed announcing %s, stored to 0 peers", blob_hash[:8])
return stored_to
def stop(self) -> None:
if self.joined.is_set():
self.joined.clear()
if self._join_task:
self._join_task.cancel()
if self._refresh_task and not (self._refresh_task.done() or self._refresh_task.cancelled()):
self._refresh_task.cancel()
if self.protocol and self.protocol.ping_queue.running:
self.protocol.ping_queue.stop()
self.protocol.stop()
if self.listening_port is not None:
self.listening_port.close()
self._join_task = None
self.listening_port = None
log.info("Stopped DHT node")
async def start_listening(self, interface: str = '0.0.0.0') -> None:
if not self.listening_port:
self.listening_port, _ = await self.loop.create_datagram_endpoint(
lambda: self.protocol, (interface, self.internal_udp_port)
)
log.info("DHT node listening on UDP %s:%i", interface, self.internal_udp_port)
self.protocol.start()
else:
log.warning("Already bound to port %s", self.listening_port)
async def join_network(self, interface: str = '0.0.0.0',
known_node_urls: typing.Optional[typing.List[typing.Tuple[str, int]]] = None):
def peers_from_urls(urls: typing.Optional[typing.List[typing.Tuple[bytes, str, int, int]]]):
peer_addresses = []
for node_id, address, udp_port, tcp_port in urls:
if (node_id, address, udp_port, tcp_port) not in peer_addresses and \
(address, udp_port) != (self.protocol.external_ip, self.protocol.udp_port):
peer_addresses.append((node_id, address, udp_port, tcp_port))
return [make_kademlia_peer(*peer_address) for peer_address in peer_addresses]
if not self.listening_port:
await self.start_listening(interface)
self.protocol.ping_queue.start()
self._refresh_task = self.loop.create_task(self.refresh_node())
while True:
if self.protocol.routing_table.get_peers():
if not self.joined.is_set():
self.joined.set()
log.info(
"joined dht, %i peers known in %i buckets", len(self.protocol.routing_table.get_peers()),
self.protocol.routing_table.buckets_with_contacts()
)
else:
if self.joined.is_set():
self.joined.clear()
seed_peers = peers_from_urls(
await self._storage.get_persisted_kademlia_peers()
) if self._storage else []
if not seed_peers:
try:
seed_peers.extend(peers_from_urls([
(None, await resolve_host(address, udp_port, 'udp'), udp_port, None)
for address, udp_port in known_node_urls or []
]))
except socket.gaierror:
await asyncio.sleep(30)
continue
self.protocol.peer_manager.reset()
self.protocol.ping_queue.enqueue_maybe_ping(*seed_peers, delay=0.0)
await self.peer_search(self.protocol.node_id, shortlist=seed_peers, count=32)
await asyncio.sleep(1)
def start(self, interface: str, known_node_urls: typing.Optional[typing.List[typing.Tuple[str, int]]] = None):
self._join_task = self.loop.create_task(self.join_network(interface, known_node_urls))
def get_iterative_node_finder(self, key: bytes, shortlist: typing.Optional[typing.List['KademliaPeer']] = None,
max_results: int = constants.K) -> IterativeNodeFinder:
shortlist = shortlist or self.protocol.routing_table.find_close_peers(key)
return IterativeNodeFinder(self.loop, self.protocol, key, max_results, shortlist)
def get_iterative_value_finder(self, key: bytes, shortlist: typing.Optional[typing.List['KademliaPeer']] = None,
max_results: int = -1) -> IterativeValueFinder:
shortlist = shortlist or self.protocol.routing_table.find_close_peers(key)
return IterativeValueFinder(self.loop, self.protocol, key, max_results, shortlist)
async def peer_search(self, node_id: bytes, count=constants.K, max_results=constants.K * 2,
shortlist: typing.Optional[typing.List['KademliaPeer']] = None
) -> typing.List['KademliaPeer']:
peers = []
async with aclosing(self.get_iterative_node_finder(
node_id, shortlist=shortlist, max_results=max_results)) as node_finder:
async for iteration_peers in node_finder:
peers.extend(iteration_peers)
distance = Distance(node_id)
peers.sort(key=lambda peer: distance(peer.node_id))
return peers[:count]
async def _accumulate_peers_for_value(self, search_queue: asyncio.Queue, result_queue: asyncio.Queue):
tasks = []
try:
while True:
blob_hash = await search_queue.get()
tasks.append(self.loop.create_task(self._peers_for_value_producer(blob_hash, result_queue)))
finally:
for task in tasks:
task.cancel()
async def _peers_for_value_producer(self, blob_hash: str, result_queue: asyncio.Queue):
async def put_into_result_queue_after_pong(_peer):
try:
await self.protocol.get_rpc_peer(_peer).ping()
result_queue.put_nowait([_peer])
log.debug("pong from %s:%i for %s", _peer.address, _peer.udp_port, blob_hash)
except asyncio.TimeoutError:
pass
# prioritize peers who reply to a dht ping first
# this minimizes attempting to make tcp connections that won't work later to dead or unreachable peers
async with aclosing(self.get_iterative_value_finder(bytes.fromhex(blob_hash))) as value_finder:
async for results in value_finder:
to_put = []
for peer in results:
if peer.address == self.protocol.external_ip and self.protocol.peer_port == peer.tcp_port:
continue
is_good = self.protocol.peer_manager.peer_is_good(peer)
if is_good:
# the peer has replied recently over UDP, it can probably be reached on the TCP port
to_put.append(peer)
elif is_good is None:
if not peer.udp_port:
# TODO: use the same port for TCP and UDP
# the udp port must be guessed
                            # default to the ports being the same. if the TCP port appears to be the
                            # <=0.48.0 default, including on a network with several nodes, assume the
                            # udp port is offset proportionately from a starting port of 4444
udp_port_to_try = peer.tcp_port
if 3400 > peer.tcp_port > 3332:
udp_port_to_try = (peer.tcp_port - 3333) + 4444
self.loop.create_task(put_into_result_queue_after_pong(
make_kademlia_peer(peer.node_id, peer.address, udp_port_to_try, peer.tcp_port)
))
else:
self.loop.create_task(put_into_result_queue_after_pong(peer))
else:
# the peer is known to be bad/unreachable, skip trying to connect to it over TCP
log.debug("skip bad peer %s:%i for %s", peer.address, peer.tcp_port, blob_hash)
if to_put:
result_queue.put_nowait(to_put)
def accumulate_peers(self, search_queue: asyncio.Queue,
peer_queue: typing.Optional[asyncio.Queue] = None
) -> typing.Tuple[asyncio.Queue, asyncio.Task]:
queue = peer_queue or asyncio.Queue()
return queue, self.loop.create_task(self._accumulate_peers_for_value(search_queue, queue))
async def get_kademlia_peers_from_hosts(peer_list: typing.List[typing.Tuple[str, int]]) -> typing.List['KademliaPeer']:
peer_address_list = [(await resolve_host(url, port, proto='tcp'), port) for url, port in peer_list]
kademlia_peer_list = [make_kademlia_peer(None, address, None, tcp_port=port, allow_localhost=True)
for address, port in peer_address_list]
return kademlia_peer_list
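
A hedged usage sketch for the helper above (not part of this changeset; must be awaited from inside a coroutine, and the host/port values are illustrative):
peers = await get_kademlia_peers_from_hosts([('tracker.example.com', 4444)])
for peer in peers:
    print(peer.address, peer.tcp_port)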

View file

@ -1,361 +0,0 @@
import asyncio
from itertools import chain
from collections import defaultdict, OrderedDict
from collections.abc import AsyncIterator
import typing
import logging
from typing import TYPE_CHECKING
from lbry.dht import constants
from lbry.dht.error import RemoteException, TransportNotConnected
from lbry.dht.protocol.distance import Distance
from lbry.dht.peer import make_kademlia_peer, decode_tcp_peer_from_compact_address
from lbry.dht.serialization.datagram import PAGE_KEY
if TYPE_CHECKING:
from lbry.dht.protocol.protocol import KademliaProtocol
from lbry.dht.peer import PeerManager, KademliaPeer
log = logging.getLogger(__name__)
class FindResponse:
@property
def found(self) -> bool:
raise NotImplementedError()
def get_close_triples(self) -> typing.List[typing.Tuple[bytes, str, int]]:
raise NotImplementedError()
    def get_close_kademlia_peers(self, peer_info) -> typing.Generator['KademliaPeer', None, None]:
for contact_triple in self.get_close_triples():
node_id, address, udp_port = contact_triple
try:
yield make_kademlia_peer(node_id, address, udp_port)
except ValueError:
log.warning("misbehaving peer %s:%i returned peer with reserved ip %s:%i", peer_info.address,
peer_info.udp_port, address, udp_port)
class FindNodeResponse(FindResponse):
def __init__(self, key: bytes, close_triples: typing.List[typing.Tuple[bytes, str, int]]):
self.key = key
self.close_triples = close_triples
@property
def found(self) -> bool:
return self.key in [triple[0] for triple in self.close_triples]
def get_close_triples(self) -> typing.List[typing.Tuple[bytes, str, int]]:
return self.close_triples
class FindValueResponse(FindResponse):
def __init__(self, key: bytes, result_dict: typing.Dict):
self.key = key
self.token = result_dict[b'token']
self.close_triples: typing.List[typing.Tuple[bytes, bytes, int]] = result_dict.get(b'contacts', [])
self.found_compact_addresses = result_dict.get(key, [])
self.pages = int(result_dict.get(PAGE_KEY, 0))
@property
def found(self) -> bool:
return len(self.found_compact_addresses) > 0
def get_close_triples(self) -> typing.List[typing.Tuple[bytes, str, int]]:
return [(node_id, address.decode(), port) for node_id, address, port in self.close_triples]
class IterativeFinder(AsyncIterator):
def __init__(self, loop: asyncio.AbstractEventLoop,
protocol: 'KademliaProtocol', key: bytes,
max_results: typing.Optional[int] = constants.K,
shortlist: typing.Optional[typing.List['KademliaPeer']] = None):
if len(key) != constants.HASH_LENGTH:
raise ValueError("invalid key length: %i" % len(key))
self.loop = loop
self.peer_manager = protocol.peer_manager
self.protocol = protocol
self.key = key
self.max_results = max(constants.K, max_results)
self.active: typing.Dict['KademliaPeer', int] = OrderedDict() # peer: distance, sorted
self.contacted: typing.Set['KademliaPeer'] = set()
self.distance = Distance(key)
self.iteration_queue = asyncio.Queue()
self.running_probes: typing.Dict['KademliaPeer', asyncio.Task] = {}
self.iteration_count = 0
self.running = False
self.tasks: typing.List[asyncio.Task] = []
for peer in shortlist:
if peer.node_id:
self._add_active(peer, force=True)
else:
# seed nodes
self._schedule_probe(peer)
async def send_probe(self, peer: 'KademliaPeer') -> FindResponse:
"""
Send the rpc request to the peer and return an object with the FindResponse interface
"""
raise NotImplementedError()
def search_exhausted(self):
"""
This method ends the iterator due no more peers to contact.
Override to provide last time results.
"""
self.iteration_queue.put_nowait(None)
def check_result_ready(self, response: FindResponse):
"""
Called after adding peers from an rpc result to the shortlist.
This method is responsible for putting a result for the generator into the Queue
"""
raise NotImplementedError()
def get_initial_result(self) -> typing.List['KademliaPeer']: #pylint: disable=no-self-use
"""
Get an initial or cached result to be put into the Queue. Used for findValue requests where the blob
has peers in the local data store of blobs announced to us
"""
return []
def _add_active(self, peer, force=False):
if not force and self.peer_manager.peer_is_good(peer) is False:
return
if peer in self.contacted:
return
if peer not in self.active and peer.node_id and peer.node_id != self.protocol.node_id:
self.active[peer] = self.distance(peer.node_id)
self.active = OrderedDict(sorted(self.active.items(), key=lambda item: item[1]))
async def _handle_probe_result(self, peer: 'KademliaPeer', response: FindResponse):
self._add_active(peer)
for new_peer in response.get_close_kademlia_peers(peer):
self._add_active(new_peer)
self.check_result_ready(response)
self._log_state(reason="check result")
def _reset_closest(self, peer):
if peer in self.active:
del self.active[peer]
async def _send_probe(self, peer: 'KademliaPeer'):
try:
response = await self.send_probe(peer)
except asyncio.TimeoutError:
self._reset_closest(peer)
return
except asyncio.CancelledError:
log.debug("%s[%x] cancelled probe",
type(self).__name__, id(self))
raise
except ValueError as err:
log.warning(str(err))
self._reset_closest(peer)
return
except TransportNotConnected:
await self._aclose(reason="not connected")
return
except RemoteException:
self._reset_closest(peer)
return
return await self._handle_probe_result(peer, response)
def _search_round(self):
"""
        Send up to constants.ALPHA (5) probes to the closest active peers
"""
added = 0
for index, peer in enumerate(self.active.keys()):
if index == 0:
log.debug("%s[%x] closest to probe: %s",
type(self).__name__, id(self),
peer.node_id.hex()[:8])
if peer in self.contacted:
continue
if len(self.running_probes) >= constants.ALPHA:
break
if index > (constants.K + len(self.running_probes)):
break
origin_address = (peer.address, peer.udp_port)
if peer.node_id == self.protocol.node_id:
continue
if origin_address == (self.protocol.external_ip, self.protocol.udp_port):
continue
self._schedule_probe(peer)
added += 1
log.debug("%s[%x] running %d probes for key %s",
type(self).__name__, id(self),
len(self.running_probes), self.key.hex()[:8])
if not added and not self.running_probes:
log.debug("%s[%x] search for %s exhausted",
type(self).__name__, id(self),
self.key.hex()[:8])
self.search_exhausted()
def _schedule_probe(self, peer: 'KademliaPeer'):
self.contacted.add(peer)
t = self.loop.create_task(self._send_probe(peer))
def callback(_):
self.running_probes.pop(peer, None)
if self.running:
self._search_round()
t.add_done_callback(callback)
self.running_probes[peer] = t
def _log_state(self, reason="?"):
log.debug("%s[%x] [%s] %s: %i active nodes %i contacted %i produced %i queued",
type(self).__name__, id(self), self.key.hex()[:8],
reason, len(self.active), len(self.contacted),
self.iteration_count, self.iteration_queue.qsize())
def __aiter__(self):
if self.running:
raise Exception("already running")
self.running = True
self.loop.call_soon(self._search_round)
return self
async def __anext__(self) -> typing.List['KademliaPeer']:
try:
if self.iteration_count == 0:
result = self.get_initial_result() or await self.iteration_queue.get()
else:
result = await self.iteration_queue.get()
if not result:
raise StopAsyncIteration
self.iteration_count += 1
return result
except asyncio.CancelledError:
await self._aclose(reason="cancelled")
raise
except StopAsyncIteration:
await self._aclose(reason="no more results")
raise
async def _aclose(self, reason="?"):
log.debug("%s[%x] [%s] shutdown because %s: %i active nodes %i contacted %i produced %i queued",
type(self).__name__, id(self), self.key.hex()[:8],
reason, len(self.active), len(self.contacted),
self.iteration_count, self.iteration_queue.qsize())
self.running = False
self.iteration_queue.put_nowait(None)
for task in chain(self.tasks, self.running_probes.values()):
task.cancel()
self.tasks.clear()
self.running_probes.clear()
async def aclose(self):
if self.running:
await self._aclose(reason="aclose")
log.debug("%s[%x] [%s] async close completed",
type(self).__name__, id(self), self.key.hex()[:8])
class IterativeNodeFinder(IterativeFinder):
def __init__(self, loop: asyncio.AbstractEventLoop,
protocol: 'KademliaProtocol', key: bytes,
max_results: typing.Optional[int] = constants.K,
shortlist: typing.Optional[typing.List['KademliaPeer']] = None):
super().__init__(loop, protocol, key, max_results, shortlist)
self.yielded_peers: typing.Set['KademliaPeer'] = set()
async def send_probe(self, peer: 'KademliaPeer') -> FindNodeResponse:
log.debug("probe %s:%d (%s) for NODE %s",
peer.address, peer.udp_port, peer.node_id.hex()[:8] if peer.node_id else '', self.key.hex()[:8])
response = await self.protocol.get_rpc_peer(peer).find_node(self.key)
return FindNodeResponse(self.key, response)
def search_exhausted(self):
self.put_result(self.active.keys(), finish=True)
def put_result(self, from_iter: typing.Iterable['KademliaPeer'], finish=False):
not_yet_yielded = [
peer for peer in from_iter
if peer not in self.yielded_peers
and peer.node_id != self.protocol.node_id
and self.peer_manager.peer_is_good(peer) is True # return only peers who answered
]
not_yet_yielded.sort(key=lambda peer: self.distance(peer.node_id))
to_yield = not_yet_yielded[:max(constants.K, self.max_results)]
if to_yield:
self.yielded_peers.update(to_yield)
self.iteration_queue.put_nowait(to_yield)
if finish:
self.iteration_queue.put_nowait(None)
def check_result_ready(self, response: FindNodeResponse):
found = response.found and self.key != self.protocol.node_id
if found:
log.debug("found")
return self.put_result(self.active.keys(), finish=True)
class IterativeValueFinder(IterativeFinder):
def __init__(self, loop: asyncio.AbstractEventLoop,
protocol: 'KademliaProtocol', key: bytes,
max_results: typing.Optional[int] = constants.K,
shortlist: typing.Optional[typing.List['KademliaPeer']] = None):
super().__init__(loop, protocol, key, max_results, shortlist)
self.blob_peers: typing.Set['KademliaPeer'] = set()
# this tracks the index of the most recent page we requested from each peer
self.peer_pages: typing.DefaultDict['KademliaPeer', int] = defaultdict(int)
# this tracks the set of blob peers returned by each peer
self.discovered_peers: typing.Dict['KademliaPeer', typing.Set['KademliaPeer']] = defaultdict(set)
async def send_probe(self, peer: 'KademliaPeer') -> FindValueResponse:
log.debug("probe %s:%d (%s) for VALUE %s",
peer.address, peer.udp_port, peer.node_id.hex()[:8], self.key.hex()[:8])
page = self.peer_pages[peer]
response = await self.protocol.get_rpc_peer(peer).find_value(self.key, page=page)
parsed = FindValueResponse(self.key, response)
if not parsed.found:
return parsed
already_known = len(self.discovered_peers[peer])
decoded_peers = set()
for compact_addr in parsed.found_compact_addresses:
try:
decoded_peers.add(decode_tcp_peer_from_compact_address(compact_addr))
except ValueError:
log.warning("misbehaving peer %s:%i returned invalid peer for blob",
peer.address, peer.udp_port)
self.peer_manager.report_failure(peer.address, peer.udp_port)
parsed.found_compact_addresses.clear()
return parsed
self.discovered_peers[peer].update(decoded_peers)
log.debug("probed %s:%i page %i, %i known", peer.address, peer.udp_port, page,
already_known + len(parsed.found_compact_addresses))
if len(self.discovered_peers[peer]) != already_known + len(parsed.found_compact_addresses):
log.warning("misbehaving peer %s:%i returned duplicate peers for blob", peer.address, peer.udp_port)
elif len(parsed.found_compact_addresses) >= constants.K and self.peer_pages[peer] < parsed.pages:
# the peer returned a full page and indicates it has more
self.peer_pages[peer] += 1
if peer in self.contacted:
# the peer must be removed from self.contacted so that it will be probed for the next page
self.contacted.remove(peer)
return parsed
def check_result_ready(self, response: FindValueResponse):
if response.found:
blob_peers = [decode_tcp_peer_from_compact_address(compact_addr)
for compact_addr in response.found_compact_addresses]
to_yield = []
for blob_peer in blob_peers:
if blob_peer not in self.blob_peers:
self.blob_peers.add(blob_peer)
to_yield.append(blob_peer)
if to_yield:
self.iteration_queue.put_nowait(to_yield)
def get_initial_result(self) -> typing.List['KademliaPeer']:
if self.protocol.data_store.has_peers_for_blob(self.key):
return self.protocol.data_store.get_peers_for_blob(self.key)
return []
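
A hedged sketch of consuming either finder (mirrors `Node.peer_search` earlier in this changeset; assumes an existing `node`, a 48-byte `key`, and `aclosing` from `lbry.utils`):
async with aclosing(node.get_iterative_node_finder(key)) as finder:
    async for closest in finder:
        print(len(closest), "peers this round")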

307
lbry/docs/404.html Normal file
View file

@ -0,0 +1,307 @@
<!DOCTYPE html>
<html lang="en" class="no-js">
<head>
<meta charset="utf-8">
<meta name="viewport" content="width=device-width,initial-scale=1">
<meta http-equiv="x-ua-compatible" content="ie=edge">
<meta name="lang:clipboard.copy" content="Copy to clipboard">
<meta name="lang:clipboard.copied" content="Copied to clipboard">
<meta name="lang:search.language" content="en">
<meta name="lang:search.pipeline.stopwords" content="True">
<meta name="lang:search.pipeline.trimmer" content="True">
<meta name="lang:search.result.none" content="No matching documents">
<meta name="lang:search.result.one" content="1 matching document">
<meta name="lang:search.result.other" content="# matching documents">
<meta name="lang:search.tokenizer" content="[\s\-]+">
<link rel="shortcut icon" href="/assets/images/favicon.png">
<meta name="generator" content="mkdocs-0.17.3, mkdocs-material-2.7.0">
<title>LBRY</title>
<link rel="stylesheet" href="/assets/stylesheets/application.78aab2dc.css">
<link rel="stylesheet" href="/assets/stylesheets/application-palette.6079476c.css">
<script src="/assets/javascripts/modernizr.1aa3b519.js"></script>
<link href="https://fonts.gstatic.com" rel="preconnect" crossorigin>
<link rel="stylesheet" href="https://fonts.googleapis.com/css?family=Roboto:300,400,400i,700|Roboto+Mono">
<style>body,input{font-family:"Roboto","Helvetica Neue",Helvetica,Arial,sans-serif}code,kbd,pre{font-family:"Roboto Mono","Courier New",Courier,monospace}</style>
<link rel="stylesheet" href="https://fonts.googleapis.com/icon?family=Material+Icons">
</head>
<body dir="ltr" data-md-color-primary="teal" data-md-color-accent="green">
<svg class="md-svg">
<defs>
<svg xmlns="http://www.w3.org/2000/svg" width="416" height="448"
viewBox="0 0 416 448" id="github">
<path fill="currentColor" d="M160 304q0 10-3.125 20.5t-10.75 19-18.125
8.5-18.125-8.5-10.75-19-3.125-20.5 3.125-20.5 10.75-19 18.125-8.5
18.125 8.5 10.75 19 3.125 20.5zM320 304q0 10-3.125 20.5t-10.75
19-18.125 8.5-18.125-8.5-10.75-19-3.125-20.5 3.125-20.5 10.75-19
18.125-8.5 18.125 8.5 10.75 19 3.125 20.5zM360
304q0-30-17.25-51t-46.75-21q-10.25 0-48.75 5.25-17.75 2.75-39.25
2.75t-39.25-2.75q-38-5.25-48.75-5.25-29.5 0-46.75 21t-17.25 51q0 22 8
38.375t20.25 25.75 30.5 15 35 7.375 37.25 1.75h42q20.5 0
37.25-1.75t35-7.375 30.5-15 20.25-25.75 8-38.375zM416 260q0 51.75-15.25
82.75-9.5 19.25-26.375 33.25t-35.25 21.5-42.5 11.875-42.875 5.5-41.75
1.125q-19.5 0-35.5-0.75t-36.875-3.125-38.125-7.5-34.25-12.875-30.25-20.25-21.5-28.75q-15.5-30.75-15.5-82.75
0-59.25 34-99-6.75-20.5-6.75-42.5 0-29 12.75-54.5 27 0 47.5 9.875t47.25
30.875q36.75-8.75 77.25-8.75 37 0 70 8 26.25-20.5
46.75-30.25t47.25-9.75q12.75 25.5 12.75 54.5 0 21.75-6.75 42 34 40 34
99.5z" />
</svg>
</defs>
</svg>
<input class="md-toggle" data-md-toggle="drawer" type="checkbox" id="drawer">
<input class="md-toggle" data-md-toggle="search" type="checkbox" id="search">
<label class="md-overlay" data-md-component="overlay" for="drawer"></label>
<header class="md-header" data-md-component="header">
<nav class="md-header-nav md-grid">
<div class="md-flex">
<div class="md-flex__cell md-flex__cell--shrink">
<a href="/" title="LBRY" class="md-header-nav__button md-logo">
<img src="https://s3.amazonaws.com/files.lbry.io/logo-square-white-bookonly.png" alt="LBRY logo" width="24" height="24">
</a>
</div>
<div class="md-flex__cell md-flex__cell--shrink">
<label class="md-icon md-icon--menu md-header-nav__button" for="drawer"></label>
</div>
<div class="md-flex__cell md-flex__cell--stretch">
<div class="md-flex__ellipsis md-header-nav__title" data-md-component="title">
<span class="md-header-nav__topic">
LBRY
</span>
<span class="md-header-nav__topic">
</span>
</div>
</div>
<div class="md-flex__cell md-flex__cell--shrink">
<label class="md-icon md-icon--search md-header-nav__button" for="search"></label>
<div class="md-search" data-md-component="search" role="dialog">
<label class="md-search__overlay" for="search"></label>
<div class="md-search__inner" role="search">
<form class="md-search__form" name="search">
<input type="text" class="md-search__input" name="query" placeholder="Search" autocapitalize="off" autocorrect="off" autocomplete="off" spellcheck="false" data-md-component="query" data-md-state="active">
<label class="md-icon md-search__icon" for="search"></label>
<button type="reset" class="md-icon md-search__icon" data-md-component="reset" tabindex="-1">
&#xE5CD;
</button>
</form>
<div class="md-search__output">
<div class="md-search__scrollwrap" data-md-scrollfix>
<div class="md-search-result" data-md-component="result">
<div class="md-search-result__meta">
Type to start searching
</div>
<ol class="md-search-result__list"></ol>
</div>
</div>
</div>
</div>
</div>
</div>
<div class="md-flex__cell md-flex__cell--shrink">
<div class="md-header-nav__source">
<a href="https://github.com/lbryio/lbry/" title="Go to repository" class="md-source" data-md-source="github">
<div class="md-source__icon">
<svg viewBox="0 0 24 24" width="24" height="24">
<use xlink:href="#github" width="24" height="24"></use>
</svg>
</div>
<div class="md-source__repository">
GitHub
</div>
</a>
</div>
</div>
</div>
</nav>
</header>
<div class="md-container">
<main class="md-main">
<div class="md-main__inner md-grid" data-md-component="container">
<div class="md-sidebar md-sidebar--primary" data-md-component="navigation">
<div class="md-sidebar__scrollwrap">
<div class="md-sidebar__inner">
<nav class="md-nav md-nav--primary" data-md-level="0">
<label class="md-nav__title md-nav__title--site" for="drawer">
<span class="md-nav__button md-logo">
<img src="https://s3.amazonaws.com/files.lbry.io/logo-square-white-bookonly.png" alt="LBRY logo" width="48" height="48">
</span>
LBRY
</label>
<div class="md-nav__source">
<a href="https://github.com/lbryio/lbry/" title="Go to repository" class="md-source" data-md-source="github">
<div class="md-source__icon">
<svg viewBox="0 0 24 24" width="24" height="24">
<use xlink:href="#github" width="24" height="24"></use>
</svg>
</div>
<div class="md-source__repository">
GitHub
</div>
</a>
</div>
<ul class="md-nav__list" data-md-scrollfix>
<li class="md-nav__item">
<a href="/" title="API" class="md-nav__link">
API
</a>
</li>
<li class="md-nav__item">
<a href="/cli/" title="CLI" class="md-nav__link">
CLI
</a>
</li>
</ul>
</nav>
</div>
</div>
</div>
<div class="md-content">
<article class="md-content__inner md-typeset">
<h1>404 - Not found</h1>
</article>
</div>
</div>
</main>
<footer class="md-footer">
<div class="md-footer-meta md-typeset">
<div class="md-footer-meta__inner md-grid">
<div class="md-footer-copyright">
powered by
<a href="http://www.mkdocs.org">MkDocs</a>
and
<a href="https://squidfunk.github.io/mkdocs-material/">
Material for MkDocs</a>
</div>
</div>
</div>
</footer>
</div>
<script src="/assets/javascripts/application.8eb9be28.js"></script>
<script>app.initialize({version:"0.17.3",url:{base:""}})</script>
<script>!function(e,a,t,n,o,c,i){e.GoogleAnalyticsObject=o,e.ga=e.ga||function(){(e.ga.q=e.ga.q||[]).push(arguments)},e.ga.l=1*new Date,c=a.createElement(t),i=a.getElementsByTagName(t)[0],c.async=1,c.src="https://www.google-analytics.com/analytics.js",i.parentNode.insertBefore(c,i)}(window,document,"script",0,"ga"),ga("create","UA-60403362-1","auto"),ga("set","anonymizeIp",!0),ga("send","pageview");var links=document.getElementsByTagName("a");if(Array.prototype.map.call(links,function(e){e.host!=document.location.host&&e.addEventListener("click",function(){var a=e.getAttribute("data-md-action")||"follow";ga("send","event","outbound",a,e.href)})}),document.forms.search){var query=document.forms.search.query;query.addEventListener("blur",function(){if(this.value){var e=document.location.pathname;ga("send","pageview",e+"?q="+this.value)}})}</script>
</body>
</html>

3142
lbry/docs/api.json Normal file

File diff suppressed because one or more lines are too long

Binary file not shown.

After

Width:  |  Height:  |  Size: 521 B

View file

@ -0,0 +1,20 @@
<svg xmlns="http://www.w3.org/2000/svg" width="352" height="448"
viewBox="0 0 352 448" id="bitbucket">
<path fill="currentColor" d="M203.75 214.75q2 15.75-12.625 25.25t-27.875
1.5q-9.75-4.25-13.375-14.5t-0.125-20.5 13-14.5q9-4.5 18.125-3t16 8.875
6.875 16.875zM231.5 209.5q-3.5-26.75-28.25-41t-49.25-3.25q-15.75
7-25.125 22.125t-8.625 32.375q1 22.75 19.375 38.75t41.375 14q22.75-2
38-21t12.5-42zM291.25
74q-5-6.75-14-11.125t-14.5-5.5-17.75-3.125q-72.75-11.75-141.5 0.5-10.75
1.75-16.5 3t-13.75 5.5-12.5 10.75q7.5 7 19 11.375t18.375 5.5 21.875
2.875q57 7.25 112 0.25 15.75-2 22.375-3t18.125-5.375 18.75-11.625zM305.5
332.75q-2 6.5-3.875 19.125t-3.5 21-7.125 17.5-14.5 14.125q-21.5
12-47.375 17.875t-50.5 5.5-50.375-4.625q-11.5-2-20.375-4.5t-19.125-6.75-18.25-10.875-13-15.375q-6.25-24-14.25-73l1.5-4
4.5-2.25q55.75 37 126.625 37t126.875-37q5.25 1.5 6 5.75t-1.25 11.25-2
9.25zM350.75 92.5q-6.5 41.75-27.75 163.75-1.25 7.5-6.75 14t-10.875
10-13.625 7.75q-63 31.5-152.5
22-62-6.75-98.5-34.75-3.75-3-6.375-6.625t-4.25-8.75-2.25-8.5-1.5-9.875-1.375-8.75q-2.25-12.5-6.625-37.5t-7-40.375-5.875-36.875-5.5-39.5q0.75-6.5
4.375-12.125t7.875-9.375 11.25-7.5 11.5-5.625 12-4.625q31.25-11.5
78.25-16 94.75-9.25 169 12.5 38.75 11.5 53.75 30.5 4 5 4.125
12.75t-1.375 13.5z" />
</svg>

After

Width:  |  Height:  |  Size: 1.4 KiB

View file

@ -0,0 +1,18 @@
<svg xmlns="http://www.w3.org/2000/svg" width="416" height="448"
viewBox="0 0 416 448" id="github">
<path fill="currentColor" d="M160 304q0 10-3.125 20.5t-10.75 19-18.125
8.5-18.125-8.5-10.75-19-3.125-20.5 3.125-20.5 10.75-19 18.125-8.5
18.125 8.5 10.75 19 3.125 20.5zM320 304q0 10-3.125 20.5t-10.75
19-18.125 8.5-18.125-8.5-10.75-19-3.125-20.5 3.125-20.5 10.75-19
18.125-8.5 18.125 8.5 10.75 19 3.125 20.5zM360
304q0-30-17.25-51t-46.75-21q-10.25 0-48.75 5.25-17.75 2.75-39.25
2.75t-39.25-2.75q-38-5.25-48.75-5.25-29.5 0-46.75 21t-17.25 51q0 22 8
38.375t20.25 25.75 30.5 15 35 7.375 37.25 1.75h42q20.5 0
37.25-1.75t35-7.375 30.5-15 20.25-25.75 8-38.375zM416 260q0 51.75-15.25
82.75-9.5 19.25-26.375 33.25t-35.25 21.5-42.5 11.875-42.875 5.5-41.75
1.125q-19.5 0-35.5-0.75t-36.875-3.125-38.125-7.5-34.25-12.875-30.25-20.25-21.5-28.75q-15.5-30.75-15.5-82.75
0-59.25 34-99-6.75-20.5-6.75-42.5 0-29 12.75-54.5 27 0 47.5 9.875t47.25
30.875q36.75-8.75 77.25-8.75 37 0 70 8 26.25-20.5
46.75-30.25t47.25-9.75q12.75 25.5 12.75 54.5 0 21.75-6.75 42 34 40 34
99.5z" />
</svg>

After

Width:  |  Height:  |  Size: 1.2 KiB

View file

@ -0,0 +1,38 @@
<svg xmlns="http://www.w3.org/2000/svg" width="500" height="500"
viewBox="0 0 500 500" id="gitlab">
<g transform="translate(156.197863, 1.160267)">
<path fill="currentColor"
d="M93.667,473.347L93.667,473.347l90.684-279.097H2.983L93.667,
473.347L93.667,473.347z" />
</g>
<g transform="translate(28.531199, 1.160800)" opacity="0.7">
<path fill="currentColor"
d="M221.333,473.345L130.649,194.25H3.557L221.333,473.345L221.333,
473.345z" />
</g>
<g transform="translate(0.088533, 0.255867)" opacity="0.5">
<path fill="currentColor"
d="M32,195.155L32,195.155L4.441,279.97c-2.513,7.735,0.24,16.21,6.821,
20.99l238.514,173.29 L32,195.155L32,195.155z" />
</g>
<g transform="translate(29.421866, 280.255593)">
<path fill="currentColor"
d="M2.667-84.844h127.092L75.14-252.942c-2.811-8.649-15.047-8.649-17.856,
0L2.667-84.844 L2.667-84.844z" />
</g>
<g transform="translate(247.197860, 1.160800)" opacity="0.7">
<path fill="currentColor"
d="M2.667,473.345L93.351,194.25h127.092L2.667,473.345L2.667,
473.345z" />
</g>
<g transform="translate(246.307061, 0.255867)" opacity="0.5">
<path fill="currentColor"
d="M221.334,195.155L221.334,195.155l27.559,84.815c2.514,7.735-0.24,
16.21-6.821,20.99 L3.557,474.25L221.334,195.155L221.334,195.155z" />
</g>
<g transform="translate(336.973725, 280.255593)">
<path fill="currentColor"
d="M130.667-84.844H3.575l54.618-168.098c2.811-8.649,15.047-8.649,
17.856,0L130.667-84.844 L130.667-84.844z" />
</g>
</svg>

After

Width:  |  Height:  |  Size: 1.6 KiB

File diff suppressed because one or more lines are too long

View file

@ -0,0 +1 @@
!function(e,r){"function"==typeof define&&define.amd?define(r):"object"==typeof exports?module.exports=r():r()(e.lunr)}(this,function(){return function(e){if(void 0===e)throw new Error("Lunr is not present. Please include / require Lunr before this script.");if(void 0===e.stemmerSupport)throw new Error("Lunr stemmer support is not present. Please include / require Lunr stemmer support before this script.");var r,i,n;e.da=function(){this.pipeline.reset(),this.pipeline.add(e.da.trimmer,e.da.stopWordFilter,e.da.stemmer),this.searchPipeline&&(this.searchPipeline.reset(),this.searchPipeline.add(e.da.stemmer))},e.da.wordCharacters="A-Za-zªºÀ-ÖØ-öø-ʸˠ-ˤᴀ-ᴥᴬ-ᵜᵢ-ᵥᵫ-ᵷᵹ-ᶾḀ-ỿⁱⁿₐ-ₜKÅℲⅎⅠ-ↈⱠ-ⱿꜢ-ꞇꞋ-ꞭꞰ-ꞷꟷ-ꟿꬰ-ꭚꭜ-ꭤff-stA--",e.da.trimmer=e.trimmerSupport.generateTrimmer(e.da.wordCharacters),e.Pipeline.registerFunction(e.da.trimmer,"trimmer-da"),e.da.stemmer=(r=e.stemmerSupport.Among,i=e.stemmerSupport.SnowballProgram,n=new function(){var e,n,t,s=[new r("hed",-1,1),new r("ethed",0,1),new r("ered",-1,1),new r("e",-1,1),new r("erede",3,1),new r("ende",3,1),new r("erende",5,1),new r("ene",3,1),new r("erne",3,1),new r("ere",3,1),new r("en",-1,1),new r("heden",10,1),new r("eren",10,1),new r("er",-1,1),new r("heder",13,1),new r("erer",13,1),new r("s",-1,2),new r("heds",16,1),new r("es",16,1),new r("endes",18,1),new r("erendes",19,1),new r("enes",18,1),new r("ernes",18,1),new r("eres",18,1),new r("ens",16,1),new r("hedens",24,1),new r("erens",24,1),new r("ers",16,1),new r("ets",16,1),new r("erets",28,1),new r("et",-1,1),new r("eret",30,1)],o=[new r("gd",-1,-1),new r("dt",-1,-1),new r("gt",-1,-1),new r("kt",-1,-1)],a=[new r("ig",-1,1),new r("lig",0,1),new r("elig",1,1),new r("els",-1,1),new r("løst",-1,2)],d=[17,65,16,1,0,0,0,0,0,0,0,0,0,0,0,0,48,0,128],u=[239,254,42,3,0,0,0,0,0,0,0,0,0,0,0,0,16],c=new i;function l(){var e,r=c.limit-c.cursor;c.cursor>=n&&(e=c.limit_backward,c.limit_backward=n,c.ket=c.cursor,c.find_among_b(o,4)?(c.bra=c.cursor,c.limit_backward=e,c.cursor=c.limit-r,c.cursor>c.limit_backward&&(c.cursor--,c.bra=c.cursor,c.slice_del())):c.limit_backward=e)}this.setCurrent=function(e){c.setCurrent(e)},this.getCurrent=function(){return c.getCurrent()},this.stem=function(){var r,i=c.cursor;return function(){var r,i=c.cursor+3;if(n=c.limit,0<=i&&i<=c.limit){for(e=i;;){if(r=c.cursor,c.in_grouping(d,97,248)){c.cursor=r;break}if(c.cursor=r,r>=c.limit)return;c.cursor++}for(;!c.out_grouping(d,97,248);){if(c.cursor>=c.limit)return;c.cursor++}(n=c.cursor)<e&&(n=e)}}(),c.limit_backward=i,c.cursor=c.limit,function(){var e,r;if(c.cursor>=n&&(r=c.limit_backward,c.limit_backward=n,c.ket=c.cursor,e=c.find_among_b(s,32),c.limit_backward=r,e))switch(c.bra=c.cursor,e){case 1:c.slice_del();break;case 2:c.in_grouping_b(u,97,229)&&c.slice_del()}}(),c.cursor=c.limit,l(),c.cursor=c.limit,function(){var e,r,i,t=c.limit-c.cursor;if(c.ket=c.cursor,c.eq_s_b(2,"st")&&(c.bra=c.cursor,c.eq_s_b(2,"ig")&&c.slice_del()),c.cursor=c.limit-t,c.cursor>=n&&(r=c.limit_backward,c.limit_backward=n,c.ket=c.cursor,e=c.find_among_b(a,5),c.limit_backward=r,e))switch(c.bra=c.cursor,e){case 1:c.slice_del(),i=c.limit-c.cursor,l(),c.cursor=c.limit-i;break;case 2:c.slice_from("løs")}}(),c.cursor=c.limit,c.cursor>=n&&(r=c.limit_backward,c.limit_backward=n,c.ket=c.cursor,c.out_grouping_b(d,97,248)?(c.bra=c.cursor,t=c.slice_to(t),c.limit_backward=r,c.eq_v_b(t)&&c.slice_del()):c.limit_backward=r),!0}},function(e){return"function"==typeof e.update?e.update(function(e){return 
n.setCurrent(e),n.stem(),n.getCurrent()}):(n.setCurrent(e),n.stem(),n.getCurrent())}),e.Pipeline.registerFunction(e.da.stemmer,"stemmer-da"),e.da.stopWordFilter=e.generateStopWordFilter("ad af alle alt anden at blev blive bliver da de dem den denne der deres det dette dig din disse dog du efter eller en end er et for fra ham han hans har havde have hende hendes her hos hun hvad hvis hvor i ikke ind jeg jer jo kunne man mange med meget men mig min mine mit mod ned noget nogle nu når og også om op os over på selv sig sin sine sit skal skulle som sådan thi til ud under var vi vil ville vor være været".split(" ")),e.Pipeline.registerFunction(e.da.stopWordFilter,"stopWordFilter-da")}});

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

View file

@ -0,0 +1 @@
!function(e,r){"function"==typeof define&&define.amd?define(r):"object"==typeof exports?module.exports=r():r()(e.lunr)}(this,function(){return function(e){if(void 0===e)throw new Error("Lunr is not present. Please include / require Lunr before this script.");if(void 0===e.stemmerSupport)throw new Error("Lunr stemmer support is not present. Please include / require Lunr stemmer support before this script.");var r="2"==e.version[0];e.jp=function(){this.pipeline.reset(),this.pipeline.add(e.jp.stopWordFilter,e.jp.stemmer),r?this.tokenizer=e.jp.tokenizer:(e.tokenizer&&(e.tokenizer=e.jp.tokenizer),this.tokenizerFn&&(this.tokenizerFn=e.jp.tokenizer))};var t=new e.TinySegmenter;e.jp.tokenizer=function(n){if(!arguments.length||null==n||null==n)return[];if(Array.isArray(n))return n.map(function(t){return r?new e.Token(t.toLowerCase()):t.toLowerCase()});for(var i=n.toString().toLowerCase().replace(/^\s+/,""),o=i.length-1;o>=0;o--)if(/\S/.test(i.charAt(o))){i=i.substring(0,o+1);break}return t.segment(i).filter(function(e){return!!e}).map(function(t){return r?new e.Token(t):t})},e.jp.stemmer=function(e){return e},e.Pipeline.registerFunction(e.jp.stemmer,"stemmer-jp"),e.jp.wordCharacters="一二三四五六七八九十百千万億兆一-龠々〆ヵヶぁ-んァ-ヴーア-ン゙a-zA-Z--0-9-",e.jp.stopWordFilter=function(t){if(-1===e.jp.stopWordFilter.stopWords.indexOf(r?t.toString():t))return t},e.jp.stopWordFilter=e.generateStopWordFilter("これ それ あれ この その あの ここ そこ あそこ こちら どこ だれ なに なん 何 私 貴方 貴方方 我々 私達 あの人 あのかた 彼女 彼 です あります おります います は が の に を で え から まで より も どの と し それで しかし".split(" ")),e.Pipeline.registerFunction(e.jp.stopWordFilter,"stopWordFilter-jp")}});

View file

@ -0,0 +1 @@
!function(e,i){"function"==typeof define&&define.amd?define(i):"object"==typeof exports?module.exports=i():i()(e.lunr)}(this,function(){return function(e){e.multiLanguage=function(){for(var i=Array.prototype.slice.call(arguments),t=i.join("-"),r="",n=[],s=[],p=0;p<i.length;++p)"en"==i[p]?(r+="\\w",n.unshift(e.stopWordFilter),n.push(e.stemmer),s.push(e.stemmer)):(r+=e[i[p]].wordCharacters,n.unshift(e[i[p]].stopWordFilter),n.push(e[i[p]].stemmer),s.push(e[i[p]].stemmer));var o=e.trimmerSupport.generateTrimmer(r);return e.Pipeline.registerFunction(o,"lunr-multi-trimmer-"+t),n.unshift(o),function(){this.pipeline.reset(),this.pipeline.add.apply(this.pipeline,n),this.searchPipeline&&(this.searchPipeline.reset(),this.searchPipeline.add.apply(this.searchPipeline,s))}}}});

View file

@ -0,0 +1 @@
!function(e,r){"function"==typeof define&&define.amd?define(r):"object"==typeof exports?module.exports=r():r()(e.lunr)}(this,function(){return function(e){if(void 0===e)throw new Error("Lunr is not present. Please include / require Lunr before this script.");if(void 0===e.stemmerSupport)throw new Error("Lunr stemmer support is not present. Please include / require Lunr stemmer support before this script.");var r,n,i;e.no=function(){this.pipeline.reset(),this.pipeline.add(e.no.trimmer,e.no.stopWordFilter,e.no.stemmer),this.searchPipeline&&(this.searchPipeline.reset(),this.searchPipeline.add(e.no.stemmer))},e.no.wordCharacters="A-Za-zªºÀ-ÖØ-öø-ʸˠ-ˤᴀ-ᴥᴬ-ᵜᵢ-ᵥᵫ-ᵷᵹ-ᶾḀ-ỿⁱⁿₐ-ₜKÅℲⅎⅠ-ↈⱠ-ⱿꜢ-ꞇꞋ-ꞭꞰ-ꞷꟷ-ꟿꬰ-ꭚꭜ-ꭤff-stA--",e.no.trimmer=e.trimmerSupport.generateTrimmer(e.no.wordCharacters),e.Pipeline.registerFunction(e.no.trimmer,"trimmer-no"),e.no.stemmer=(r=e.stemmerSupport.Among,n=e.stemmerSupport.SnowballProgram,i=new function(){var e,i,t=[new r("a",-1,1),new r("e",-1,1),new r("ede",1,1),new r("ande",1,1),new r("ende",1,1),new r("ane",1,1),new r("ene",1,1),new r("hetene",6,1),new r("erte",1,3),new r("en",-1,1),new r("heten",9,1),new r("ar",-1,1),new r("er",-1,1),new r("heter",12,1),new r("s",-1,2),new r("as",14,1),new r("es",14,1),new r("edes",16,1),new r("endes",16,1),new r("enes",16,1),new r("hetenes",19,1),new r("ens",14,1),new r("hetens",21,1),new r("ers",14,1),new r("ets",14,1),new r("et",-1,1),new r("het",25,1),new r("ert",-1,3),new r("ast",-1,1)],o=[new r("dt",-1,-1),new r("vt",-1,-1)],s=[new r("leg",-1,1),new r("eleg",0,1),new r("ig",-1,1),new r("eig",2,1),new r("lig",2,1),new r("elig",4,1),new r("els",-1,1),new r("lov",-1,1),new r("elov",7,1),new r("slov",7,1),new r("hetslov",9,1)],a=[17,65,16,1,0,0,0,0,0,0,0,0,0,0,0,0,48,0,128],m=[119,125,149,1],l=new n;this.setCurrent=function(e){l.setCurrent(e)},this.getCurrent=function(){return l.getCurrent()},this.stem=function(){var r,n,u,d,c=l.cursor;return function(){var r,n=l.cursor+3;if(i=l.limit,0<=n||n<=l.limit){for(e=n;;){if(r=l.cursor,l.in_grouping(a,97,248)){l.cursor=r;break}if(r>=l.limit)return;l.cursor=r+1}for(;!l.out_grouping(a,97,248);){if(l.cursor>=l.limit)return;l.cursor++}(i=l.cursor)<e&&(i=e)}}(),l.limit_backward=c,l.cursor=l.limit,function(){var e,r,n;if(l.cursor>=i&&(r=l.limit_backward,l.limit_backward=i,l.ket=l.cursor,e=l.find_among_b(t,29),l.limit_backward=r,e))switch(l.bra=l.cursor,e){case 1:l.slice_del();break;case 2:n=l.limit-l.cursor,l.in_grouping_b(m,98,122)?l.slice_del():(l.cursor=l.limit-n,l.eq_s_b(1,"k")&&l.out_grouping_b(a,97,248)&&l.slice_del());break;case 3:l.slice_from("er")}}(),l.cursor=l.limit,n=l.limit-l.cursor,l.cursor>=i&&(r=l.limit_backward,l.limit_backward=i,l.ket=l.cursor,l.find_among_b(o,2)?(l.bra=l.cursor,l.limit_backward=r,l.cursor=l.limit-n,l.cursor>l.limit_backward&&(l.cursor--,l.bra=l.cursor,l.slice_del())):l.limit_backward=r),l.cursor=l.limit,l.cursor>=i&&(d=l.limit_backward,l.limit_backward=i,l.ket=l.cursor,(u=l.find_among_b(s,11))?(l.bra=l.cursor,l.limit_backward=d,1==u&&l.slice_del()):l.limit_backward=d),!0}},function(e){return"function"==typeof e.update?e.update(function(e){return i.setCurrent(e),i.stem(),i.getCurrent()}):(i.setCurrent(e),i.stem(),i.getCurrent())}),e.Pipeline.registerFunction(e.no.stemmer,"stemmer-no"),e.no.stopWordFilter=e.generateStopWordFilter("alle at av bare begge ble blei bli blir blitt både båe da de deg dei deim deira deires dem den denne der dere deres det dette di din disse ditt du dykk dykkar då eg ein eit eitt eller elles en enn er et ett etter for fordi fra før ha hadde han 
hans har hennar henne hennes her hjå ho hoe honom hoss hossen hun hva hvem hver hvilke hvilken hvis hvor hvordan hvorfor i ikke ikkje ikkje ingen ingi inkje inn inni ja jeg kan kom korleis korso kun kunne kva kvar kvarhelst kven kvi kvifor man mange me med medan meg meget mellom men mi min mine mitt mot mykje ned no noe noen noka noko nokon nokor nokre nå når og også om opp oss over på samme seg selv si si sia sidan siden sin sine sitt sjøl skal skulle slik so som som somme somt så sånn til um upp ut uten var vart varte ved vere verte vi vil ville vore vors vort vår være være vært å".split(" ")),e.Pipeline.registerFunction(e.no.stopWordFilter,"stopWordFilter-no")}});

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

View file

@ -0,0 +1 @@
!function(r,t){"function"==typeof define&&define.amd?define(t):"object"==typeof exports?module.exports=t():t()(r.lunr)}(this,function(){return function(r){r.stemmerSupport={Among:function(r,t,i,s){if(this.toCharArray=function(r){for(var t=r.length,i=new Array(t),s=0;s<t;s++)i[s]=r.charCodeAt(s);return i},!r&&""!=r||!t&&0!=t||!i)throw"Bad Among initialisation: s:"+r+", substring_i: "+t+", result: "+i;this.s_size=r.length,this.s=this.toCharArray(r),this.substring_i=t,this.result=i,this.method=s},SnowballProgram:function(){var r;return{bra:0,ket:0,limit:0,cursor:0,limit_backward:0,setCurrent:function(t){r=t,this.cursor=0,this.limit=t.length,this.limit_backward=0,this.bra=this.cursor,this.ket=this.limit},getCurrent:function(){var t=r;return r=null,t},in_grouping:function(t,i,s){if(this.cursor<this.limit){var e=r.charCodeAt(this.cursor);if(e<=s&&e>=i&&t[(e-=i)>>3]&1<<(7&e))return this.cursor++,!0}return!1},in_grouping_b:function(t,i,s){if(this.cursor>this.limit_backward){var e=r.charCodeAt(this.cursor-1);if(e<=s&&e>=i&&t[(e-=i)>>3]&1<<(7&e))return this.cursor--,!0}return!1},out_grouping:function(t,i,s){if(this.cursor<this.limit){var e=r.charCodeAt(this.cursor);if(e>s||e<i)return this.cursor++,!0;if(!(t[(e-=i)>>3]&1<<(7&e)))return this.cursor++,!0}return!1},out_grouping_b:function(t,i,s){if(this.cursor>this.limit_backward){var e=r.charCodeAt(this.cursor-1);if(e>s||e<i)return this.cursor--,!0;if(!(t[(e-=i)>>3]&1<<(7&e)))return this.cursor--,!0}return!1},eq_s:function(t,i){if(this.limit-this.cursor<t)return!1;for(var s=0;s<t;s++)if(r.charCodeAt(this.cursor+s)!=i.charCodeAt(s))return!1;return this.cursor+=t,!0},eq_s_b:function(t,i){if(this.cursor-this.limit_backward<t)return!1;for(var s=0;s<t;s++)if(r.charCodeAt(this.cursor-t+s)!=i.charCodeAt(s))return!1;return this.cursor-=t,!0},find_among:function(t,i){for(var s=0,e=i,n=this.cursor,u=this.limit,o=0,h=0,c=!1;;){for(var a=s+(e-s>>1),f=0,l=o<h?o:h,_=t[a],m=l;m<_.s_size;m++){if(n+l==u){f=-1;break}if(f=r.charCodeAt(n+l)-_.s[m])break;l++}if(f<0?(e=a,h=l):(s=a,o=l),e-s<=1){if(s>0||e==s||c)break;c=!0}}for(;;){if(o>=(_=t[s]).s_size){if(this.cursor=n+_.s_size,!_.method)return _.result;var b=_.method();if(this.cursor=n+_.s_size,b)return _.result}if((s=_.substring_i)<0)return 0}},find_among_b:function(t,i){for(var s=0,e=i,n=this.cursor,u=this.limit_backward,o=0,h=0,c=!1;;){for(var a=s+(e-s>>1),f=0,l=o<h?o:h,_=(m=t[a]).s_size-1-l;_>=0;_--){if(n-l==u){f=-1;break}if(f=r.charCodeAt(n-1-l)-m.s[_])break;l++}if(f<0?(e=a,h=l):(s=a,o=l),e-s<=1){if(s>0||e==s||c)break;c=!0}}for(;;){var m;if(o>=(m=t[s]).s_size){if(this.cursor=n-m.s_size,!m.method)return m.result;var b=m.method();if(this.cursor=n-m.s_size,b)return m.result}if((s=m.substring_i)<0)return 0}},replace_s:function(t,i,s){var e=s.length-(i-t),n=r.substring(0,t),u=r.substring(i);return r=n+s+u,this.limit+=e,this.cursor>=i?this.cursor+=e:this.cursor>t&&(this.cursor=t),e},slice_check:function(){if(this.bra<0||this.bra>this.ket||this.ket>this.limit||this.limit>r.length)throw"faulty slice operation"},slice_from:function(r){this.slice_check(),this.replace_s(this.bra,this.ket,r)},slice_del:function(){this.slice_from("")},insert:function(r,t,i){var s=this.replace_s(r,t,i);r<=this.bra&&(this.bra+=s),r<=this.ket&&(this.ket+=s)},slice_to:function(){return this.slice_check(),r.substring(this.bra,this.ket)},eq_v_b:function(r){return this.eq_s_b(r.length,r)}}}},r.trimmerSupport={generateTrimmer:function(r){var t=new RegExp("^[^"+r+"]+"),i=new RegExp("[^"+r+"]+$");return function(r){return"function"==typeof 
r.update?r.update(function(r){return r.replace(t,"").replace(i,"")}):r.replace(t,"").replace(i,"")}}}}});

View file

@ -0,0 +1 @@
!function(e,r){"function"==typeof define&&define.amd?define(r):"object"==typeof exports?module.exports=r():r()(e.lunr)}(this,function(){return function(e){if(void 0===e)throw new Error("Lunr is not present. Please include / require Lunr before this script.");if(void 0===e.stemmerSupport)throw new Error("Lunr stemmer support is not present. Please include / require Lunr stemmer support before this script.");var r,n,t;e.sv=function(){this.pipeline.reset(),this.pipeline.add(e.sv.trimmer,e.sv.stopWordFilter,e.sv.stemmer),this.searchPipeline&&(this.searchPipeline.reset(),this.searchPipeline.add(e.sv.stemmer))},e.sv.wordCharacters="A-Za-zªºÀ-ÖØ-öø-ʸˠ-ˤᴀ-ᴥᴬ-ᵜᵢ-ᵥᵫ-ᵷᵹ-ᶾḀ-ỿⁱⁿₐ-ₜKÅℲⅎⅠ-ↈⱠ-ⱿꜢ-ꞇꞋ-ꞭꞰ-ꞷꟷ-ꟿꬰ-ꭚꭜ-ꭤff-stA--",e.sv.trimmer=e.trimmerSupport.generateTrimmer(e.sv.wordCharacters),e.Pipeline.registerFunction(e.sv.trimmer,"trimmer-sv"),e.sv.stemmer=(r=e.stemmerSupport.Among,n=e.stemmerSupport.SnowballProgram,t=new function(){var e,t,i=[new r("a",-1,1),new r("arna",0,1),new r("erna",0,1),new r("heterna",2,1),new r("orna",0,1),new r("ad",-1,1),new r("e",-1,1),new r("ade",6,1),new r("ande",6,1),new r("arne",6,1),new r("are",6,1),new r("aste",6,1),new r("en",-1,1),new r("anden",12,1),new r("aren",12,1),new r("heten",12,1),new r("ern",-1,1),new r("ar",-1,1),new r("er",-1,1),new r("heter",18,1),new r("or",-1,1),new r("s",-1,2),new r("as",21,1),new r("arnas",22,1),new r("ernas",22,1),new r("ornas",22,1),new r("es",21,1),new r("ades",26,1),new r("andes",26,1),new r("ens",21,1),new r("arens",29,1),new r("hetens",29,1),new r("erns",21,1),new r("at",-1,1),new r("andet",-1,1),new r("het",-1,1),new r("ast",-1,1)],s=[new r("dd",-1,-1),new r("gd",-1,-1),new r("nn",-1,-1),new r("dt",-1,-1),new r("gt",-1,-1),new r("kt",-1,-1),new r("tt",-1,-1)],a=[new r("ig",-1,1),new r("lig",0,1),new r("els",-1,1),new r("fullt",-1,3),new r("löst",-1,2)],o=[17,65,16,1,0,0,0,0,0,0,0,0,0,0,0,0,24,0,32],u=[119,127,149],m=new n;this.setCurrent=function(e){m.setCurrent(e)},this.getCurrent=function(){return m.getCurrent()},this.stem=function(){var r,n=m.cursor;return function(){var r,n=m.cursor+3;if(t=m.limit,0<=n||n<=m.limit){for(e=n;;){if(r=m.cursor,m.in_grouping(o,97,246)){m.cursor=r;break}if(m.cursor=r,m.cursor>=m.limit)return;m.cursor++}for(;!m.out_grouping(o,97,246);){if(m.cursor>=m.limit)return;m.cursor++}(t=m.cursor)<e&&(t=e)}}(),m.limit_backward=n,m.cursor=m.limit,function(){var e,r=m.limit_backward;if(m.cursor>=t&&(m.limit_backward=t,m.cursor=m.limit,m.ket=m.cursor,e=m.find_among_b(i,37),m.limit_backward=r,e))switch(m.bra=m.cursor,e){case 1:m.slice_del();break;case 2:m.in_grouping_b(u,98,121)&&m.slice_del()}}(),m.cursor=m.limit,r=m.limit_backward,m.cursor>=t&&(m.limit_backward=t,m.cursor=m.limit,m.find_among_b(s,7)&&(m.cursor=m.limit,m.ket=m.cursor,m.cursor>m.limit_backward&&(m.bra=--m.cursor,m.slice_del())),m.limit_backward=r),m.cursor=m.limit,function(){var e,r;if(m.cursor>=t){if(r=m.limit_backward,m.limit_backward=t,m.cursor=m.limit,m.ket=m.cursor,e=m.find_among_b(a,5))switch(m.bra=m.cursor,e){case 1:m.slice_del();break;case 2:m.slice_from("lös");break;case 3:m.slice_from("full")}m.limit_backward=r}}(),!0}},function(e){return"function"==typeof e.update?e.update(function(e){return t.setCurrent(e),t.stem(),t.getCurrent()}):(t.setCurrent(e),t.stem(),t.getCurrent())}),e.Pipeline.registerFunction(e.sv.stemmer,"stemmer-sv"),e.sv.stopWordFilter=e.generateStopWordFilter("alla allt att av blev bli blir blivit de dem den denna deras dess dessa det detta dig din dina ditt du där då efter ej eller en er era ert ett från för ha hade han hans har henne hennes hon honom hur här i icke ingen inom inte jag ju kan kunde man med mellan men mig min mina mitt mot mycket ni nu när någon något några och om oss på samma sedan sig sin sina sitta själv skulle som så sådan sådana sådant till under upp ut utan vad var vara varför varit varje vars vart vem vi vid vilka vilkas vilken vilket vår våra vårt än är åt över".split(" ")),e.Pipeline.registerFunction(e.sv.stopWordFilter,"stopWordFilter-sv")}});

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

File diff suppressed because one or more lines are too long

2587
lbry/docs/cli/index.html Normal file

File diff suppressed because it is too large

2313
lbry/docs/index.html Normal file

File diff suppressed because it is too large

File diff suppressed because one or more lines are too long

View file

@ -1,5 +0,0 @@
generate:
python generate.py generate > __init__.py
analyze:
python generate.py analyze

View file

@ -1,95 +0,0 @@
# Exceptions
Exceptions in LBRY are defined and generated from the Markdown table at the end of this README.
## Guidelines
When possible, use [built-in Python exceptions](https://docs.python.org/3/library/exceptions.html) or `aiohttp` [general client](https://docs.aiohttp.org/en/latest/client_reference.html#client-exceptions) / [HTTP](https://docs.aiohttp.org/en/latest/web_exceptions.html) exceptions, unless:
1. You want to provide a better error message (extend the closest built-in/`aiohttp` exception in this case).
2. You need to represent a new situation.
When defining your own exceptions, consider:
1. Extending a built-in Python or `aiohttp` exception.
2. Using contextual variables in the error message.
## Table Column Definitions
Column | Meaning
---|---
Code | Codes are used only to define the hierarchy of exceptions and do not end up in the generated output; it is okay to re-number things as necessary at any time to achieve the desired hierarchy.
Name | Becomes the class name of the exception with "Error" appended to the end. Changing the names of existing exceptions makes the API backwards incompatible. When extending other exceptions you must specify the full class name, manually adding "Error" as necessary (if extending another SDK exception).
Message | User-friendly error message explaining the exceptional event. Supports Python format strings: any variables used in the string will be generated as arguments in the `__init__` method. Use `--` to provide a doc string after the error message to be added to the class definition.
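For example, a hypothetical row such as `209 | ConfigUnsupported | Configuration setting '{setting}' is not supported. -- Hypothetical row, for illustration only.` (the name and message are made up, not part of the real table) would be rendered by the generator roughly as:

```python
class ConfigUnsupportedError(ConfigurationError):
    """
    Hypothetical row, for illustration only.
    """
    def __init__(self, setting):
        self.setting = setting
        super().__init__(f"Configuration setting '{setting}' is not supported.")
```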
## Exceptions Table
Code | Name | Message
---:|---|---
**1xx** | UserInput | User input errors.
**10x** | Command | Errors preparing to execute commands.
101 | CommandDoesNotExist | Command '{command}' does not exist.
102 | CommandDeprecated | Command '{command}' is deprecated.
103 | CommandInvalidArgument | Invalid argument '{argument}' to command '{command}'.
104 | CommandTemporarilyUnavailable | Command '{command}' is temporarily unavailable. -- Such as waiting for required components to start.
105 | CommandPermanentlyUnavailable | Command '{command}' is permanently unavailable. -- Such as when a required component was intentionally configured not to start.
**11x** | InputValue(ValueError) | Invalid argument value provided to command.
111 | GenericInputValue | The value '{value}' for argument '{argument}' is not valid.
112 | InputValueIsNone | None or null is not a valid value for argument '{argument}'.
113 | ConflictingInputValue | Only '{first_argument}' or '{second_argument}' is allowed, not both.
114 | InputStringIsBlank | {argument} cannot be blank.
115 | EmptyPublishedFile | Cannot publish empty file: {file_path}
116 | MissingPublishedFile | File does not exist: {file_path}
117 | InvalidStreamURL | Invalid LBRY stream URL: '{url}' -- When a URL cannot be downloaded, such as '@Channel/' or a collection
**2xx** | Configuration | Configuration errors.
201 | ConfigWrite | Cannot write configuration file '{path}'. -- When writing the default config fails on startup, such as due to permission issues.
202 | ConfigRead | Cannot find provided configuration file '{path}'. -- Can't open the config file the user provided via command line args.
203 | ConfigParse | Failed to parse the configuration file '{path}'. -- Includes the syntax error / line number to help the user fix it.
204 | ConfigMissing | Configuration file '{path}' is missing a setting that has no default / fallback.
205 | ConfigInvalid | Configuration file '{path}' has a setting with an invalid value.
**3xx** | Network | **Networking**
301 | NoInternet | No internet connection.
302 | NoUPnPSupport | Router does not support UPnP.
**4xx** | Wallet | **Wallet Errors**
401 | TransactionRejected | Transaction rejected, unknown reason.
402 | TransactionFeeTooLow | Fee too low.
403 | TransactionInvalidSignature | Invalid signature.
404 | InsufficientFunds | Not enough funds to cover this transaction. -- Determined by the wallet prior to attempting to broadcast a tx; this is different, for example, from a tx being created and sent but then rejected by lbrycrd for unspendable utxos.
405 | ChannelKeyNotFound | Channel signing key not found.
406 | ChannelKeyInvalid | Channel signing key is out of date. -- For example, the channel was updated but you don't have the updated key.
407 | DataDownload | Failed to download blob. -- Generic blob download error.
408 | PrivateKeyNotFound | Couldn't find private key for {key} '{value}'.
410 | Resolve | Failed to resolve '{url}'.
411 | ResolveTimeout | Failed to resolve '{url}' within the timeout.
412 | ResolveCensored | Resolve of '{url}' was censored by channel with claim id '{censor_id}'.
420 | KeyFeeAboveMaxAllowed | {message}
421 | InvalidPassword | Password is invalid.
422 | IncompatibleWalletServer | '{server}:{port}' has an incompatibly old version.
423 | TooManyClaimSearchParameters | {key} can't have more than {limit} items.
424 | AlreadyPurchased | You already have a purchase for claim_id '{claim_id_hex}'. Use the allow-duplicate-purchase flag to override.
431 | ServerPaymentInvalidAddress | Invalid address from wallet server: '{address}' - skipping payment round.
432 | ServerPaymentWalletLocked | Cannot spend funds with locked wallet, skipping payment round.
433 | ServerPaymentFeeAboveMaxAllowed | Daily server fee of {daily_fee} exceeds maximum configured of {max_fee} LBC.
434 | WalletNotLoaded | Wallet {wallet_id} is not loaded.
435 | WalletAlreadyLoaded | Wallet {wallet_path} is already loaded.
436 | WalletNotFound | Wallet not found at {wallet_path}.
437 | WalletAlreadyExists | Wallet {wallet_path} already exists, use `wallet_add` to load it.
**5xx** | Blob | **Blobs**
500 | BlobNotFound | Blob not found.
501 | BlobPermissionDenied | Permission denied to read blob.
502 | BlobTooBig | Blob is too big.
503 | BlobEmpty | Blob is empty.
510 | BlobFailedDecryption | Failed to decrypt blob.
511 | CorruptBlob | Blob is corrupted.
520 | BlobFailedEncryption | Failed to encrypt blob.
531 | DownloadCancelled | Download was canceled.
532 | DownloadSDTimeout | Failed to download sd blob {download} within timeout.
533 | DownloadDataTimeout | Failed to download data blobs for sd hash {download} within timeout.
534 | InvalidStreamDescriptor | {message}
535 | InvalidData | {message}
536 | InvalidBlobHash | {message}
**6xx** | Component | **Components**
601 | ComponentStartConditionNotMet | Unresolved dependencies for: {components}
602 | ComponentsNotStarted | {message}
**7xx** | CurrencyExchange | **Currency Exchange**
701 | InvalidExchangeRateResponse | Failed to get exchange rate from {source}: {reason}
702 | CurrencyConversion | {message}
703 | InvalidCurrency | Invalid currency: {currency} is not a supported currency.

View file

@ -1,494 +0,0 @@
from .base import BaseError, claim_id
class UserInputError(BaseError):
"""
User input errors.
"""
class CommandError(UserInputError):
"""
Errors preparing to execute commands.
"""
class CommandDoesNotExistError(CommandError):
def __init__(self, command):
self.command = command
super().__init__(f"Command '{command}' does not exist.")
class CommandDeprecatedError(CommandError):
def __init__(self, command):
self.command = command
super().__init__(f"Command '{command}' is deprecated.")
class CommandInvalidArgumentError(CommandError):
def __init__(self, argument, command):
self.argument = argument
self.command = command
super().__init__(f"Invalid argument '{argument}' to command '{command}'.")
class CommandTemporarilyUnavailableError(CommandError):
"""
Such as waiting for required components to start.
"""
def __init__(self, command):
self.command = command
super().__init__(f"Command '{command}' is temporarily unavailable.")
class CommandPermanentlyUnavailableError(CommandError):
"""
Such as when a required component was intentionally configured not to start.
"""
def __init__(self, command):
self.command = command
super().__init__(f"Command '{command}' is permanently unavailable.")
class InputValueError(UserInputError, ValueError):
"""
Invalid argument value provided to command.
"""
class GenericInputValueError(InputValueError):
def __init__(self, value, argument):
self.value = value
self.argument = argument
super().__init__(f"The value '{value}' for argument '{argument}' is not valid.")
class InputValueIsNoneError(InputValueError):
def __init__(self, argument):
self.argument = argument
super().__init__(f"None or null is not valid value for argument '{argument}'.")
class ConflictingInputValueError(InputValueError):
def __init__(self, first_argument, second_argument):
self.first_argument = first_argument
self.second_argument = second_argument
super().__init__(f"Only '{first_argument}' or '{second_argument}' is allowed, not both.")
class InputStringIsBlankError(InputValueError):
def __init__(self, argument):
self.argument = argument
super().__init__(f"{argument} cannot be blank.")
class EmptyPublishedFileError(InputValueError):
def __init__(self, file_path):
self.file_path = file_path
super().__init__(f"Cannot publish empty file: {file_path}")
class MissingPublishedFileError(InputValueError):
def __init__(self, file_path):
self.file_path = file_path
super().__init__(f"File does not exist: {file_path}")
class InvalidStreamURLError(InputValueError):
"""
When a URL cannot be downloaded, such as '@Channel/' or a collection
"""
def __init__(self, url):
self.url = url
super().__init__(f"Invalid LBRY stream URL: '{url}'")
class ConfigurationError(BaseError):
"""
Configuration errors.
"""
class ConfigWriteError(ConfigurationError):
"""
When writing the default config fails on startup, such as due to permission issues.
"""
def __init__(self, path):
self.path = path
super().__init__(f"Cannot write configuration file '{path}'.")
class ConfigReadError(ConfigurationError):
"""
Can't open the config file the user provided via command line args.
"""
def __init__(self, path):
self.path = path
super().__init__(f"Cannot find provided configuration file '{path}'.")
class ConfigParseError(ConfigurationError):
"""
Includes the syntax error / line number to help the user fix it.
"""
def __init__(self, path):
self.path = path
super().__init__(f"Failed to parse the configuration file '{path}'.")
class ConfigMissingError(ConfigurationError):
def __init__(self, path):
self.path = path
super().__init__(f"Configuration file '{path}' is missing setting that has no default / fallback.")
class ConfigInvalidError(ConfigurationError):
def __init__(self, path):
self.path = path
super().__init__(f"Configuration file '{path}' has setting with invalid value.")
class NetworkError(BaseError):
"""
**Networking**
"""
class NoInternetError(NetworkError):
def __init__(self):
super().__init__("No internet connection.")
class NoUPnPSupportError(NetworkError):
def __init__(self):
super().__init__("Router does not support UPnP.")
class WalletError(BaseError):
"""
**Wallet Errors**
"""
class TransactionRejectedError(WalletError):
def __init__(self):
super().__init__("Transaction rejected, unknown reason.")
class TransactionFeeTooLowError(WalletError):
def __init__(self):
super().__init__("Fee too low.")
class TransactionInvalidSignatureError(WalletError):
def __init__(self):
super().__init__("Invalid signature.")
class InsufficientFundsError(WalletError):
"""
Determined by the wallet prior to attempting to broadcast a tx; this is different, for example, from a tx
being created and sent but then rejected by lbrycrd for unspendable utxos.
"""
def __init__(self):
super().__init__("Not enough funds to cover this transaction.")
class ChannelKeyNotFoundError(WalletError):
def __init__(self):
super().__init__("Channel signing key not found.")
class ChannelKeyInvalidError(WalletError):
"""
For example, the channel was updated but you don't have the updated key.
"""
def __init__(self):
super().__init__("Channel signing key is out of date.")
class DataDownloadError(WalletError):
"""
Generic blob download error.
"""
def __init__(self):
super().__init__("Failed to download blob.")
class PrivateKeyNotFoundError(WalletError):
def __init__(self, key, value):
self.key = key
self.value = value
super().__init__(f"Couldn't find private key for {key} '{value}'.")
class ResolveError(WalletError):
def __init__(self, url):
self.url = url
super().__init__(f"Failed to resolve '{url}'.")
class ResolveTimeoutError(WalletError):
def __init__(self, url):
self.url = url
super().__init__(f"Failed to resolve '{url}' within the timeout.")
class ResolveCensoredError(WalletError):
def __init__(self, url, censor_id, censor_row):
self.url = url
self.censor_id = censor_id
self.censor_row = censor_row
super().__init__(f"Resolve of '{url}' was censored by channel with claim id '{censor_id}'.")
class KeyFeeAboveMaxAllowedError(WalletError):
def __init__(self, message):
self.message = message
super().__init__(f"{message}")
class InvalidPasswordError(WalletError):
def __init__(self):
super().__init__("Password is invalid.")
class IncompatibleWalletServerError(WalletError):
def __init__(self, server, port):
self.server = server
self.port = port
super().__init__(f"'{server}:{port}' has an incompatibly old version.")
class TooManyClaimSearchParametersError(WalletError):
def __init__(self, key, limit):
self.key = key
self.limit = limit
super().__init__(f"{key} cant have more than {limit} items.")
class AlreadyPurchasedError(WalletError):
def __init__(self, claim_id_hex):
self.claim_id_hex = claim_id_hex
super().__init__(f"You already have a purchase for claim_id '{claim_id_hex}'. Use the allow-duplicate-purchase flag to override.")
class ServerPaymentInvalidAddressError(WalletError):
def __init__(self, address):
self.address = address
super().__init__(f"Invalid address from wallet server: '{address}' - skipping payment round.")
class ServerPaymentWalletLockedError(WalletError):
def __init__(self):
super().__init__("Cannot spend funds with locked wallet, skipping payment round.")
class ServerPaymentFeeAboveMaxAllowedError(WalletError):
def __init__(self, daily_fee, max_fee):
self.daily_fee = daily_fee
self.max_fee = max_fee
super().__init__(f"Daily server fee of {daily_fee} exceeds maximum configured of {max_fee} LBC.")
class WalletNotLoadedError(WalletError):
def __init__(self, wallet_id):
self.wallet_id = wallet_id
super().__init__(f"Wallet {wallet_id} is not loaded.")
class WalletAlreadyLoadedError(WalletError):
def __init__(self, wallet_path):
self.wallet_path = wallet_path
super().__init__(f"Wallet {wallet_path} is already loaded.")
class WalletNotFoundError(WalletError):
def __init__(self, wallet_path):
self.wallet_path = wallet_path
super().__init__(f"Wallet not found at {wallet_path}.")
class WalletAlreadyExistsError(WalletError):
def __init__(self, wallet_path):
self.wallet_path = wallet_path
super().__init__(f"Wallet {wallet_path} already exists, use `wallet_add` to load it.")
class BlobError(BaseError):
"""
**Blobs**
"""
class BlobNotFoundError(BlobError):
def __init__(self):
super().__init__("Blob not found.")
class BlobPermissionDeniedError(BlobError):
def __init__(self):
super().__init__("Permission denied to read blob.")
class BlobTooBigError(BlobError):
def __init__(self):
super().__init__("Blob is too big.")
class BlobEmptyError(BlobError):
def __init__(self):
super().__init__("Blob is empty.")
class BlobFailedDecryptionError(BlobError):
def __init__(self):
super().__init__("Failed to decrypt blob.")
class CorruptBlobError(BlobError):
def __init__(self):
super().__init__("Blobs is corrupted.")
class BlobFailedEncryptionError(BlobError):
def __init__(self):
super().__init__("Failed to encrypt blob.")
class DownloadCancelledError(BlobError):
def __init__(self):
super().__init__("Download was canceled.")
class DownloadSDTimeoutError(BlobError):
def __init__(self, download):
self.download = download
super().__init__(f"Failed to download sd blob {download} within timeout.")
class DownloadDataTimeoutError(BlobError):
def __init__(self, download):
self.download = download
super().__init__(f"Failed to download data blobs for sd hash {download} within timeout.")
class InvalidStreamDescriptorError(BlobError):
def __init__(self, message):
self.message = message
super().__init__(f"{message}")
class InvalidDataError(BlobError):
def __init__(self, message):
self.message = message
super().__init__(f"{message}")
class InvalidBlobHashError(BlobError):
def __init__(self, message):
self.message = message
super().__init__(f"{message}")
class ComponentError(BaseError):
"""
**Components**
"""
class ComponentStartConditionNotMetError(ComponentError):
def __init__(self, components):
self.components = components
super().__init__(f"Unresolved dependencies for: {components}")
class ComponentsNotStartedError(ComponentError):
def __init__(self, message):
self.message = message
super().__init__(f"{message}")
class CurrencyExchangeError(BaseError):
"""
**Currency Exchange**
"""
class InvalidExchangeRateResponseError(CurrencyExchangeError):
def __init__(self, source, reason):
self.source = source
self.reason = reason
super().__init__(f"Failed to get exchange rate from {source}: {reason}")
class CurrencyConversionError(CurrencyExchangeError):
def __init__(self, message):
self.message = message
super().__init__(f"{message}")
class InvalidCurrencyError(CurrencyExchangeError):
def __init__(self, currency):
self.currency = currency
super().__init__(f"Invalid currency: {currency} is not a supported currency.")

View file

@ -1,9 +0,0 @@
from binascii import hexlify
def claim_id(claim_hash):
return hexlify(claim_hash[::-1]).decode()
class BaseError(Exception):
pass
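As a small illustrative check (the hash bytes are made up), `claim_id` reverses the byte order of a binary claim hash and returns it hex-encoded:

```python
from binascii import unhexlify

# b'\xde\xad\xbe\xef' reversed is b'\xef\xbe\xad\xde', hex-encoded as 'efbeadde'
assert claim_id(unhexlify('deadbeef')) == 'efbeadde'
```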

View file

@ -1,167 +0,0 @@
import re
import sys
import argparse
from pathlib import Path
from textwrap import fill, indent
INDENT = ' ' * 4
CLASS = """
class {name}({parents}):{doc}
"""
INIT = """
def __init__({args}):{fields}
super().__init__({format}"{message}")
"""
FUNCTIONS = ['claim_id']
class ErrorClass:
def __init__(self, hierarchy, name, message):
self.hierarchy = hierarchy.replace('**', '')
self.other_parents = []
if '(' in name:
assert ')' in name, f"Missing closing parenthesis in '{name}'."
self.other_parents = name[name.find('(')+1:name.find(')')].split(',')
name = name[:name.find('(')]
self.name = name
self.class_name = name+'Error'
self.message = message
self.comment = ""
if '--' in message:
self.message, self.comment = message.split('--')
self.message = self.message.strip()
self.comment = self.comment.strip()
@property
def is_leaf(self):
return 'x' not in self.hierarchy
@property
def code(self):
return self.hierarchy.replace('x', '')
@property
def parent_codes(self):
return self.hierarchy[0:2], self.hierarchy[0]
def get_arguments(self):
args = ['self']
for arg in re.findall('{([a-z0-9_()]+)}', self.message):  # 0-9, not 0-1: allow any digit in argument names
for func in FUNCTIONS:
if arg.startswith(f'{func}('):
arg = arg[len(f'{func}('):-1]
break
args.append(arg)
return args
@staticmethod
def get_fields(args):
if len(args) > 1:
return ''.join(f'\n{INDENT*2}self.{field} = {field}' for field in args[1:])
return ''
@staticmethod
def get_doc_string(doc):
if doc:
return f'\n{INDENT}"""\n{indent(fill(doc, 100), INDENT)}\n{INDENT}"""'
return ""
def render(self, out, parent):
if not parent:
parents = ['BaseError']
else:
parents = [parent.class_name]
parents += self.other_parents
args = self.get_arguments()
if self.is_leaf:
out.write((CLASS + INIT).format(
name=self.class_name, parents=', '.join(parents),
args=', '.join(args), fields=self.get_fields(args),
message=self.message, doc=self.get_doc_string(self.comment), format='f' if len(args) > 1 else ''
))
else:
out.write(CLASS.format(
name=self.class_name, parents=', '.join(parents),
doc=self.get_doc_string(self.comment or self.message)
))
def get_errors():
with open('README.md', 'r') as readme:
lines = iter(readme.readlines())
for line in lines:
if line.startswith('## Exceptions Table'):
break
for line in lines:
if line.startswith('---:|'):
break
for line in lines:
if not line.strip():  # lines from readlines() keep their newline, so test the stripped line
break
yield ErrorClass(*[c.strip() for c in line.split('|')])
def find_parent(stack, child):
for parent_code in child.parent_codes:
parent = stack.get(parent_code)
if parent:
return parent
def generate(out):
out.write(f"from .base import BaseError, {', '.join(FUNCTIONS)}\n")
stack = {}
for error in get_errors():
error.render(out, find_parent(stack, error))
if not error.is_leaf:
assert error.code not in stack, f"Duplicate code: {error.code}"
stack[error.code] = error
def analyze():
errors = {e.class_name: [] for e in get_errors() if e.is_leaf}
here = Path(__file__).absolute().parents[0]
module = here.parent
for file_path in module.glob('**/*.py'):
if here in file_path.parents:
continue
with open(file_path) as src_file:
src = src_file.read()
for error in errors.keys():
found = src.count(error)
if found > 0:
errors[error].append((file_path, found))
print('Used Errors:\n')
for error, used in errors.items():
if used:
print(f' - {error}')
for use in used:
print(f' {use[0].relative_to(module.parent)} {use[1]}')
print('')
print('')
print('Unused Errors:')
for error, used in errors.items():
if not used:
print(f' - {error}')
def main():
parser = argparse.ArgumentParser()
parser.add_argument("action", choices=['generate', 'analyze'])
args = parser.parse_args()
if args.action == "analyze":
analyze()
elif args.action == "generate":
generate(sys.stdout)
if __name__ == "__main__":
main()
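A rough sketch of what `ErrorClass` extracts from a single row (the row is copied from the README table above):

```python
row = "101 | CommandDoesNotExist | Command '{command}' does not exist."
error = ErrorClass(*[c.strip() for c in row.split('|')])
assert error.class_name == 'CommandDoesNotExistError'
assert error.is_leaf                                 # no 'x' in '101'
assert error.get_arguments() == ['self', 'command']  # drives the generated __init__
```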

File diff suppressed because it is too large Load diff

View file

@ -1,248 +0,0 @@
import json
import time
import asyncio
import logging
from statistics import median
from decimal import Decimal
from typing import Optional, Iterable, Type
from aiohttp.client_exceptions import ContentTypeError, ClientConnectionError
from lbry.error import InvalidExchangeRateResponseError, CurrencyConversionError
from lbry.utils import aiohttp_request
from lbry.wallet.dewies import lbc_to_dewies
log = logging.getLogger(__name__)
class ExchangeRate:
def __init__(self, market, spot, ts):
if int(time.time()) - ts >= 600:
raise ValueError('The timestamp is too dated.')
if spot <= 0:
raise ValueError('Spot must be greater than 0.')
self.currency_pair = (market[0:3], market[3:6])
self.spot = spot
self.ts = ts
def __repr__(self):
return f"Currency pair:{self.currency_pair}, spot:{self.spot}, ts:{self.ts}"
def as_dict(self):
return {'spot': self.spot, 'ts': self.ts}
class MarketFeed:
name: str = ""
market: str = ""
url: str = ""
params = {}
fee = 0
update_interval = 300
request_timeout = 50
def __init__(self):
self.rate: Optional[float] = None
self.last_check = 0
self._last_response = None
self._task: Optional[asyncio.Task] = None
self.event = asyncio.Event()
@property
def has_rate(self):
return self.rate is not None
@property
def is_online(self):
return self.last_check+self.update_interval+self.request_timeout > time.time()
def get_rate_from_response(self, json_response):
raise NotImplementedError()
async def get_response(self):
async with aiohttp_request(
'get', self.url, params=self.params,
timeout=self.request_timeout, headers={"User-Agent": "lbrynet"}
) as response:
try:
self._last_response = await response.json(content_type=None)
except ContentTypeError as e:
self._last_response = {}
log.warning("Could not parse exchange rate response from %s: %s", self.name, e.message)
log.debug(await response.text())
return self._last_response
async def get_rate(self):
try:
data = await self.get_response()
rate = self.get_rate_from_response(data)
rate = rate / (1.0 - self.fee)
log.debug("Saving rate update %f for %s from %s", rate, self.market, self.name)
self.rate = ExchangeRate(self.market, rate, int(time.time()))
self.last_check = time.time()
return self.rate
except asyncio.TimeoutError:
log.warning("Timed out fetching exchange rate from %s.", self.name)
except json.JSONDecodeError as e:
msg = e.doc if '<html>' not in e.doc else 'unexpected content type.'
log.warning("Could not parse exchange rate response from %s: %s", self.name, msg)
log.debug(e.doc)
except InvalidExchangeRateResponseError as e:
log.warning(str(e))
except ClientConnectionError as e:
log.warning("Error trying to connect to exchange rate %s: %s", self.name, str(e))
except Exception:
log.exception("Exchange rate error (%s from %s):", self.market, self.name)
finally:
self.event.set()
async def keep_updated(self):
while True:
await self.get_rate()
await asyncio.sleep(self.update_interval)
def start(self):
if not self._task:
self._task = asyncio.create_task(self.keep_updated())
def stop(self):
if self._task and not self._task.done():
self._task.cancel()
self._task = None
self.event.clear()
class BaseBittrexFeed(MarketFeed):
name = "Bittrex"
market = None
url = None
fee = 0.0025
def get_rate_from_response(self, json_response):
if 'lastTradeRate' not in json_response:
raise InvalidExchangeRateResponseError(self.name, 'result not found')
return 1.0 / float(json_response['lastTradeRate'])
class BittrexBTCFeed(BaseBittrexFeed):
market = "BTCLBC"
url = "https://api.bittrex.com/v3/markets/LBC-BTC/ticker"
class BittrexUSDFeed(BaseBittrexFeed):
market = "USDLBC"
url = "https://api.bittrex.com/v3/markets/LBC-USD/ticker"
class BaseCoinExFeed(MarketFeed):
name = "CoinEx"
market = None
url = None
def get_rate_from_response(self, json_response):
if 'data' not in json_response or \
'ticker' not in json_response['data'] or \
'last' not in json_response['data']['ticker']:
raise InvalidExchangeRateResponseError(self.name, 'result not found')
return 1.0 / float(json_response['data']['ticker']['last'])
class CoinExBTCFeed(BaseCoinExFeed):
market = "BTCLBC"
url = "https://api.coinex.com/v1/market/ticker?market=LBCBTC"
class CoinExUSDFeed(BaseCoinExFeed):
market = "USDLBC"
url = "https://api.coinex.com/v1/market/ticker?market=LBCUSDT"
class BaseHotbitFeed(MarketFeed):
name = "hotbit"
market = None
url = "https://api.hotbit.io/api/v1/market.last"
def get_rate_from_response(self, json_response):
if 'result' not in json_response:
raise InvalidExchangeRateResponseError(self.name, 'result not found')
return 1.0 / float(json_response['result'])
class HotbitBTCFeed(BaseHotbitFeed):
market = "BTCLBC"
params = {"market": "LBC/BTC"}
class HotbitUSDFeed(BaseHotbitFeed):
market = "USDLBC"
params = {"market": "LBC/USDT"}
class UPbitBTCFeed(MarketFeed):
name = "UPbit"
market = "BTCLBC"
url = "https://api.upbit.com/v1/ticker"
params = {"markets": "BTC-LBC"}
def get_rate_from_response(self, json_response):
if "error" in json_response or len(json_response) != 1 or 'trade_price' not in json_response[0]:
raise InvalidExchangeRateResponseError(self.name, 'result not found')
return 1.0 / float(json_response[0]['trade_price'])
FEEDS: Iterable[Type[MarketFeed]] = (
BittrexBTCFeed,
BittrexUSDFeed,
CoinExBTCFeed,
CoinExUSDFeed,
# HotbitBTCFeed,
# HotbitUSDFeed,
# UPbitBTCFeed,
)
class ExchangeRateManager:
def __init__(self, feeds=FEEDS):
self.market_feeds = [Feed() for Feed in feeds]
def wait(self):
return asyncio.wait(
[feed.event.wait() for feed in self.market_feeds],
)
def start(self):
log.info("Starting exchange rate manager")
for feed in self.market_feeds:
feed.start()
def stop(self):
log.info("Stopping exchange rate manager")
for source in self.market_feeds:
source.stop()
def convert_currency(self, from_currency, to_currency, amount):
log.debug(
"Converting %f %s to %s, rates: %s",
amount, from_currency, to_currency,
[market.rate for market in self.market_feeds]
)
if from_currency == to_currency:
return round(amount, 8)
rates = []
for market in self.market_feeds:
if (market.has_rate and market.is_online and
market.rate.currency_pair == (from_currency, to_currency)):
rates.append(market.rate.spot)
if rates:
return round(amount * Decimal(median(rates)), 8)
raise CurrencyConversionError(
f'Unable to convert {amount} from {from_currency} to {to_currency}')
def to_dewies(self, currency, amount) -> int:
converted = self.convert_currency(currency, "LBC", amount)
return lbc_to_dewies(str(converted))
def fee_dict(self):
return {market: market.rate.as_dict() for market in self.market_feeds}
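A minimal usage sketch, assuming an event loop is running and at least one USD feed comes online (Python 3.9-era `asyncio.wait` semantics, matching what this module relies on):

```python
import asyncio
from decimal import Decimal

async def usd_to_lbc(usd_amount: str) -> Decimal:
    manager = ExchangeRateManager()
    manager.start()
    try:
        await manager.wait()  # each feed sets its event after its first fetch attempt
        # raises CurrencyConversionError if no online feed covers the USDLBC pair
        return manager.convert_currency('USD', 'LBC', Decimal(usd_amount))
    finally:
        manager.stop()

# asyncio.run(usd_to_lbc('1.50'))
```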

View file

@ -1,69 +0,0 @@
import sqlite3
import os
import time
def do_migration(conf):
db_path = os.path.join(conf.data_dir, 'lbrynet.sqlite')
connection = sqlite3.connect(db_path)
connection.row_factory = sqlite3.Row
cursor = connection.cursor()
current_columns = []
for col_info in cursor.execute("pragma table_info('file');").fetchall():
current_columns.append(col_info[1])
if 'added_on' in current_columns:
connection.close()
print('already migrated')
return
# follow 12 step schema change procedure
cursor.execute("pragma foreign_keys=off")
# we don't have any indexes, views or triggers, so step 3 is skipped.
cursor.execute("drop table if exists new_file")
cursor.execute("""
create table if not exists new_file (
stream_hash text not null primary key references stream,
file_name text,
download_directory text,
blob_data_rate text not null,
status text not null,
saved_file integer not null,
content_fee text,
added_on integer not null
);
""")
# step 5: transfer content from old to new
select = "select * from file"
for (stream_hash, file_name, download_dir, blob_rate, status, saved_file, fee) \
in cursor.execute(select).fetchall():
added_on = int(time.time())
cursor.execute(
"insert into new_file values (?, ?, ?, ?, ?, ?, ?, ?)",
(stream_hash, file_name, download_dir, blob_rate, status, saved_file, fee, added_on)
)
# step 6: drop old table
cursor.execute("drop table file")
# step 7: rename new table to old table
cursor.execute("alter table new_file rename to file")
# step 8: we aren't using indexes, views or triggers so skip
# step 9: no views so skip
# step 10: foreign key check
cursor.execute("pragma foreign_key_check;")
# step 11: commit transaction
connection.commit()
# step 12: re-enable foreign keys
connection.execute("pragma foreign_keys=on;")
# done :)
connection.close()
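A quick way to confirm the migration took effect, mirroring the column check the script itself performs (the database path is illustrative):

```python
import sqlite3

connection = sqlite3.connect('/path/to/lbrynet.sqlite')  # illustrative path
columns = [row[1] for row in connection.execute("pragma table_info('file');")]
assert 'added_on' in columns
connection.close()
```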

View file

@ -1,80 +0,0 @@
import os
import sqlite3
def do_migration(conf):
db_path = os.path.join(conf.data_dir, "lbrynet.sqlite")
connection = sqlite3.connect(db_path)
cursor = connection.cursor()
current_columns = []
for col_info in cursor.execute("pragma table_info('file');").fetchall():
current_columns.append(col_info[1])
if 'bt_infohash' in current_columns:
connection.close()
print("already migrated")
return
cursor.executescript("""
pragma foreign_keys=off;
create table if not exists torrent (
bt_infohash char(20) not null primary key,
tracker text,
length integer not null,
name text not null
);
create table if not exists torrent_node ( -- BEP-0005
bt_infohash char(20) not null references torrent,
host text not null,
port integer not null
);
create table if not exists torrent_tracker ( -- BEP-0012
bt_infohash char(20) not null references torrent,
tracker text not null
);
create table if not exists torrent_http_seed ( -- BEP-0017
bt_infohash char(20) not null references torrent,
http_seed text not null
);
create table if not exists new_file (
stream_hash char(96) references stream,
bt_infohash char(20) references torrent,
file_name text,
download_directory text,
blob_data_rate real not null,
status text not null,
saved_file integer not null,
content_fee text,
added_on integer not null
);
create table if not exists new_content_claim (
stream_hash char(96) references stream,
bt_infohash char(20) references torrent,
claim_outpoint text unique not null references claim
);
insert into new_file (stream_hash, bt_infohash, file_name, download_directory, blob_data_rate, status,
saved_file, content_fee, added_on) select
stream_hash, NULL, file_name, download_directory, blob_data_rate, status, saved_file, content_fee,
added_on
from file;
insert or ignore into new_content_claim (stream_hash, bt_infohash, claim_outpoint)
select stream_hash, NULL, claim_outpoint from content_claim;
drop table file;
drop table content_claim;
alter table new_file rename to file;
alter table new_content_claim rename to content_claim;
pragma foreign_keys=on;
""")
connection.commit()
connection.close()

View file

@ -1,21 +0,0 @@
import os
import sqlite3
def do_migration(conf):
db_path = os.path.join(conf.data_dir, "lbrynet.sqlite")
connection = sqlite3.connect(db_path)
cursor = connection.cursor()
cursor.executescript("""
create table if not exists peer (
node_id char(96) not null primary key,
address text not null,
udp_port integer not null,
tcp_port integer,
unique (address, udp_port)
);
""")
connection.commit()
connection.close()

View file

@ -1,16 +0,0 @@
import os
import sqlite3
def do_migration(conf):
db_path = os.path.join(conf.data_dir, "lbrynet.sqlite")
connection = sqlite3.connect(db_path)
cursor = connection.cursor()
cursor.executescript("""
alter table blob add column added_on integer not null default 0;
alter table blob add column is_mine integer not null default 1;
""")
connection.commit()
connection.close()

View file

@ -1,17 +0,0 @@
import os
import sqlite3
def do_migration(conf):
db_path = os.path.join(conf.data_dir, "lbrynet.sqlite")
connection = sqlite3.connect(db_path)
cursor = connection.cursor()
cursor.executescript("""
update blob set should_announce=0
where should_announce=1 and
blob.blob_hash in (select stream_blob.blob_hash from stream_blob where position=0);
""")
connection.commit()
connection.close()

View file

@ -1,31 +0,0 @@
import logging
from aiohttp import web
log = logging.getLogger(__name__)
def ensure_request_allowed(request, conf):
if is_request_allowed(request, conf):
return
if conf.allowed_origin:
log.warning(
"API requests with Origin '%s' are not allowed, "
"configuration 'allowed_origin' limits requests to: '%s'",
request.headers.get('Origin'), conf.allowed_origin
)
else:
log.warning(
"API requests with Origin '%s' are not allowed, "
"update configuration 'allowed_origin' to enable this origin.",
request.headers.get('Origin')
)
raise web.HTTPForbidden()
def is_request_allowed(request, conf) -> bool:
origin = request.headers.get('Origin')
return (
origin is None or
origin == conf.allowed_origin or
conf.allowed_origin == '*'
)
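A brief sketch of the resulting policy; `FakeRequest` and the `conf` stand-in are made up for illustration, since only `headers` and `allowed_origin` are consulted:

```python
from types import SimpleNamespace

class FakeRequest:
    def __init__(self, origin=None):
        self.headers = {} if origin is None else {'Origin': origin}

conf = SimpleNamespace(allowed_origin='https://example.com')
assert is_request_allowed(FakeRequest(), conf)                        # no Origin header: allowed
assert is_request_allowed(FakeRequest('https://example.com'), conf)   # exact match: allowed
assert not is_request_allowed(FakeRequest('https://evil.test'), conf) # mismatch: rejected
conf.allowed_origin = '*'
assert is_request_allowed(FakeRequest('https://evil.test'), conf)     # wildcard allows any origin
```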

View file

@ -1,307 +0,0 @@
import asyncio
import logging
import typing
from typing import Optional
from aiohttp.web import Request
from lbry.error import ResolveError, DownloadSDTimeoutError, InsufficientFundsError
from lbry.error import ResolveTimeoutError, DownloadDataTimeoutError, KeyFeeAboveMaxAllowedError
from lbry.error import InvalidStreamURLError
from lbry.stream.managed_stream import ManagedStream
from lbry.torrent.torrent_manager import TorrentSource
from lbry.utils import cache_concurrent
from lbry.schema.url import URL
from lbry.wallet.dewies import dewies_to_lbc
from lbry.file.source_manager import SourceManager
from lbry.file.source import ManagedDownloadSource
from lbry.extras.daemon.storage import StoredContentClaim
if typing.TYPE_CHECKING:
from lbry.conf import Config
from lbry.extras.daemon.analytics import AnalyticsManager
from lbry.extras.daemon.storage import SQLiteStorage
from lbry.wallet import WalletManager, Wallet
from lbry.extras.daemon.exchange_rate_manager import ExchangeRateManager
log = logging.getLogger(__name__)
class FileManager:
def __init__(self, loop: asyncio.AbstractEventLoop, config: 'Config', wallet_manager: 'WalletManager',
storage: 'SQLiteStorage', analytics_manager: Optional['AnalyticsManager'] = None):
self.loop = loop
self.config = config
self.wallet_manager = wallet_manager
self.storage = storage
self.analytics_manager = analytics_manager
self.source_managers: typing.Dict[str, SourceManager] = {}
self.started = asyncio.Event()
@property
def streams(self):
return self.source_managers['stream']._sources
async def create_stream(self, file_path: str, key: Optional[bytes] = None, **kwargs) -> ManagedDownloadSource:
if 'stream' in self.source_managers:
return await self.source_managers['stream'].create(file_path, key, **kwargs)
raise NotImplementedError
async def start(self):
await asyncio.gather(*(source_manager.start() for source_manager in self.source_managers.values()))
for manager in self.source_managers.values():
await manager.started.wait()
self.started.set()
async def stop(self):
for manager in self.source_managers.values():
# fixme: pop or not?
await manager.stop()
self.started.clear()
@cache_concurrent
async def download_from_uri(self, uri, exchange_rate_manager: 'ExchangeRateManager',
timeout: Optional[float] = None, file_name: Optional[str] = None,
download_directory: Optional[str] = None,
save_file: Optional[bool] = None, resolve_timeout: float = 3.0,
wallet: Optional['Wallet'] = None) -> ManagedDownloadSource:
wallet = wallet or self.wallet_manager.default_wallet
timeout = timeout or self.config.download_timeout
start_time = self.loop.time()
resolved_time = None
stream = None
claim = None
error = None
outpoint = None
if save_file is None:
save_file = self.config.save_files
if file_name and not save_file:
save_file = True
if save_file:
download_directory = download_directory or self.config.download_dir
else:
download_directory = None
payment = None
try:
# resolve the claim
try:
if not URL.parse(uri).has_stream:
raise InvalidStreamURLError(uri)
except ValueError:
raise InvalidStreamURLError(uri)
try:
resolved_result = await asyncio.wait_for(
self.wallet_manager.ledger.resolve(
wallet.accounts, [uri],
include_purchase_receipt=True,
include_is_my_output=True
), resolve_timeout
)
except asyncio.TimeoutError:
raise ResolveTimeoutError(uri)
except Exception as err:
log.exception("Unexpected error resolving stream:")
raise ResolveError(f"Unexpected error resolving stream: {str(err)}")
if 'error' in resolved_result:
raise ResolveError(f"Unexpected error resolving uri for download: {resolved_result['error']}")
if not resolved_result or uri not in resolved_result:
raise ResolveError(f"Failed to resolve stream at '{uri}'")
txo = resolved_result[uri]
if isinstance(txo, dict):
raise ResolveError(f"Failed to resolve stream at '{uri}': {txo}")
claim = txo.claim
outpoint = f"{txo.tx_ref.id}:{txo.position}"
resolved_time = self.loop.time() - start_time
await self.storage.save_claim_from_output(self.wallet_manager.ledger, txo)
####################
# update or replace
####################
if claim.stream.source.bt_infohash:
source_manager = self.source_managers['torrent']
existing = source_manager.get_filtered(bt_infohash=claim.stream.source.bt_infohash)
elif claim.stream.source.sd_hash:
source_manager = self.source_managers['stream']
existing = source_manager.get_filtered(sd_hash=claim.stream.source.sd_hash)
else:
raise ResolveError(f"There is nothing to download at {uri} - Source is unknown or unset")
# resume or update an existing stream, if the stream changed: download it and delete the old one after
to_replace, updated_stream = None, None
if existing and existing[0].claim_id != txo.claim_id:
raise ResolveError(f"stream for {existing[0].claim_id} collides with existing download {txo.claim_id}")
if existing:
log.info("claim contains a metadata only update to a stream we have")
if claim.stream.source.bt_infohash:
await self.storage.save_torrent_content_claim(
existing[0].identifier, outpoint, existing[0].torrent_length, existing[0].torrent_name
)
claim_info = await self.storage.get_content_claim_for_torrent(existing[0].identifier)
existing[0].set_claim(claim_info, claim)
else:
await self.storage.save_content_claim(
existing[0].stream_hash, outpoint
)
await source_manager._update_content_claim(existing[0])
updated_stream = existing[0]
else:
existing_for_claim_id = self.get_filtered(claim_id=txo.claim_id)
if existing_for_claim_id:
log.info("claim contains an update to a stream we have, downloading it")
if save_file and existing_for_claim_id[0].output_file_exists:
save_file = False
if not claim.stream.source.bt_infohash:
existing_for_claim_id[0].downloader.node = source_manager.node
await existing_for_claim_id[0].start(timeout=timeout, save_now=save_file)
if not existing_for_claim_id[0].output_file_exists and (
save_file or file_name or download_directory):
await existing_for_claim_id[0].save_file(
file_name=file_name, download_directory=download_directory
)
to_replace = existing_for_claim_id[0]
# resume or update an existing stream, if the stream changed: download it and delete the old one after
if updated_stream:
log.info("already have stream for %s", uri)
if save_file and updated_stream.output_file_exists:
save_file = False
if not claim.stream.source.bt_infohash:
updated_stream.downloader.node = source_manager.node
await updated_stream.start(timeout=timeout, save_now=save_file)
if not updated_stream.output_file_exists and (save_file or file_name or download_directory):
await updated_stream.save_file(
file_name=file_name, download_directory=download_directory
)
return updated_stream
####################
# pay fee
####################
needs_purchasing = (
not to_replace and
not txo.is_my_output and
txo.has_price and
not txo.purchase_receipt
)
if needs_purchasing:
payment = await self.wallet_manager.create_purchase_transaction(
wallet.accounts, txo, exchange_rate_manager
)
####################
# make downloader and wait for start
####################
# temporary with fields we know so downloader can start. Missing fields are populated later.
stored_claim = StoredContentClaim(outpoint=outpoint, claim_id=txo.claim_id, name=txo.claim_name,
amount=txo.amount, height=txo.tx_ref.height,
serialized=claim.to_bytes().hex())
if not claim.stream.source.bt_infohash:
# fixme: this shouldn't be here
stream = ManagedStream(
self.loop, self.config, source_manager.blob_manager, claim.stream.source.sd_hash,
download_directory, file_name, ManagedStream.STATUS_RUNNING, content_fee=payment,
analytics_manager=self.analytics_manager, claim=stored_claim
)
stream.downloader.node = source_manager.node
else:
stream = TorrentSource(
self.loop, self.config, self.storage, identifier=claim.stream.source.bt_infohash,
file_name=file_name, download_directory=download_directory or self.config.download_dir,
status=ManagedStream.STATUS_RUNNING, claim=stored_claim, analytics_manager=self.analytics_manager,
torrent_session=source_manager.torrent_session
)
log.info("starting download for %s", uri)
before_download = self.loop.time()
await stream.start(timeout, save_file)
####################
# success case: delete to_replace if applicable, broadcast fee payment
####################
if to_replace: # delete old stream now that the replacement has started downloading
await source_manager.delete(to_replace)
if payment is not None:
await self.wallet_manager.broadcast_or_release(payment)
payment = None # to avoid releasing in `finally` later
log.info("paid fee of %s for %s", dewies_to_lbc(stream.content_fee.outputs[0].amount), uri)
await self.storage.save_content_fee(stream.stream_hash, stream.content_fee)
source_manager.add(stream)
if not claim.stream.source.bt_infohash:
await self.storage.save_content_claim(stream.stream_hash, outpoint)
else:
await self.storage.save_torrent_content_claim(
stream.identifier, outpoint, stream.torrent_length, stream.torrent_name
)
claim_info = await self.storage.get_content_claim_for_torrent(stream.identifier)
stream.set_claim(claim_info, claim)
if save_file:
await asyncio.wait_for(stream.save_file(), timeout - (self.loop.time() - before_download))
return stream
except asyncio.TimeoutError:
error = DownloadDataTimeoutError(stream.sd_hash)
raise error
except (Exception, asyncio.CancelledError) as err: # forgive data timeout, don't delete stream
expected = (DownloadSDTimeoutError, DownloadDataTimeoutError, InsufficientFundsError,
KeyFeeAboveMaxAllowedError, ResolveError, InvalidStreamURLError)
if isinstance(err, expected):
log.warning("Failed to download %s: %s", uri, str(err))
elif isinstance(err, asyncio.CancelledError):
pass
else:
log.exception("Unexpected error downloading stream:")
error = err
raise
finally:
if payment is not None:
# payment is set to None after broadcasting, if we're here an exception probably happened
await self.wallet_manager.ledger.release_tx(payment)
if self.analytics_manager and claim and claim.stream.source.bt_infohash:
# TODO: analytics for torrents
pass
elif self.analytics_manager and (error or (stream and (stream.downloader.time_to_descriptor or
stream.downloader.time_to_first_bytes))):
server = self.wallet_manager.ledger.network.client.server
self.loop.create_task(
self.analytics_manager.send_time_to_first_bytes(
resolved_time, self.loop.time() - start_time, None if not stream else stream.download_id,
uri, outpoint,
None if not stream else len(stream.downloader.blob_downloader.active_connections),
None if not stream else len(stream.downloader.blob_downloader.scores),
None if not stream else len(stream.downloader.blob_downloader.connection_failures),
False if not stream else stream.downloader.added_fixed_peers,
self.config.fixed_peer_delay if not stream else stream.downloader.fixed_peers_delay,
None if not stream else stream.sd_hash,
None if not stream else stream.downloader.time_to_descriptor,
None if not (stream and stream.descriptor) else stream.descriptor.blobs[0].blob_hash,
None if not (stream and stream.descriptor) else stream.descriptor.blobs[0].length,
None if not stream else stream.downloader.time_to_first_bytes,
None if not error else error.__class__.__name__,
None if not error else str(error),
None if not server else f"{server[0]}:{server[1]}"
)
)
async def stream_partial_content(self, request: Request, sd_hash: str):
return await self.source_managers['stream'].stream_partial_content(request, sd_hash)
def get_filtered(self, *args, **kwargs) -> typing.List[ManagedDownloadSource]:
"""
Get a list of filtered and sorted ManagedStream objects
:param sort_by: field to sort by
:param reverse: reverse sorting
:param comparison: comparison operator used for filtering
:param search_by: fields and values to filter by
"""
return sum((manager.get_filtered(*args, **kwargs) for manager in self.source_managers.values()), [])
async def delete(self, source: ManagedDownloadSource, delete_file=False):
for manager in self.source_managers.values():
await manager.delete(source, delete_file)
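A hedged sketch of how daemon code might drive this manager; the URL and the surrounding `file_manager` / `exchange_rate_manager` objects are assumed, not taken from this module:

```python
async def fetch_example(file_manager: FileManager, exchange_rate_manager):
    # resolves the claim, pays any required fee, then starts the download
    stream = await file_manager.download_from_uri(
        'lbry://@SomeChannel/some-stream',  # illustrative URL
        exchange_rate_manager,
        timeout=30.0,
        save_file=True,
    )
    return stream.full_path
```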

View file

@ -1,162 +0,0 @@
import os
import asyncio
import typing
import logging
import binascii
from typing import Optional
from lbry.utils import generate_id
from lbry.extras.daemon.storage import StoredContentClaim
if typing.TYPE_CHECKING:
from lbry.conf import Config
from lbry.extras.daemon.analytics import AnalyticsManager
from lbry.wallet.transaction import Transaction
from lbry.extras.daemon.storage import SQLiteStorage
from lbry.schema.claim import Claim
log = logging.getLogger(__name__)
class ManagedDownloadSource:
STATUS_RUNNING = "running"
STATUS_STOPPED = "stopped"
STATUS_FINISHED = "finished"
SAVING_ID = 1
STREAMING_ID = 2
def __init__(self, loop: asyncio.AbstractEventLoop, config: 'Config', storage: 'SQLiteStorage', identifier: str,
file_name: Optional[str] = None, download_directory: Optional[str] = None,
status: Optional[str] = STATUS_STOPPED, claim: Optional[StoredContentClaim] = None,
download_id: Optional[str] = None, rowid: Optional[int] = None,
content_fee: Optional['Transaction'] = None,
analytics_manager: Optional['AnalyticsManager'] = None,
added_on: Optional[int] = None):
self.loop = loop
self.storage = storage
self.config = config
self.identifier = identifier
self.download_directory = download_directory
self._file_name = file_name
self._status = status
self.stream_claim_info = claim
self.download_id = download_id or binascii.hexlify(generate_id()).decode()
self.rowid = rowid
self.content_fee = content_fee
self.purchase_receipt = None
self._added_on = added_on
self.analytics_manager = analytics_manager
self.downloader = None
self.saving = asyncio.Event()
self.finished_writing = asyncio.Event()
self.started_writing = asyncio.Event()
self.finished_write_attempt = asyncio.Event()
# @classmethod
# async def create(cls, loop: asyncio.AbstractEventLoop, config: 'Config', file_path: str,
# key: Optional[bytes] = None,
# iv_generator: Optional[typing.Generator[bytes, None, None]] = None) -> 'ManagedDownloadSource':
# raise NotImplementedError()
async def start(self, timeout: Optional[float] = None, save_now: Optional[bool] = False):
raise NotImplementedError()
async def stop(self, finished: bool = False):
raise NotImplementedError()
async def save_file(self, file_name: Optional[str] = None, download_directory: Optional[str] = None):
raise NotImplementedError()
async def stop_tasks(self):
raise NotImplementedError()
def set_claim(self, claim_info: typing.Dict, claim: 'Claim'):
self.stream_claim_info = StoredContentClaim(
f"{claim_info['txid']}:{claim_info['nout']}", claim_info['claim_id'],
claim_info['name'], claim_info['amount'], claim_info['height'],
binascii.hexlify(claim.to_bytes()).decode(), claim.signing_channel_id, claim_info['address'],
claim_info['claim_sequence'], claim_info.get('channel_name')
)
# async def update_content_claim(self, claim_info: Optional[typing.Dict] = None):
# if not claim_info:
# claim_info = await self.blob_manager.storage.get_content_claim(self.stream_hash)
# self.set_claim(claim_info, claim_info['value'])
@property
def file_name(self) -> Optional[str]:
return self._file_name
@property
def added_on(self) -> Optional[int]:
return self._added_on
@property
def status(self) -> str:
return self._status
@property
def completed(self):
raise NotImplementedError()
# @property
# def stream_url(self):
# return f"http://{self.config.streaming_host}:{self.config.streaming_port}/stream/{self.sd_hash}
@property
def finished(self) -> bool:
return self.status == self.STATUS_FINISHED
@property
def running(self) -> bool:
return self.status == self.STATUS_RUNNING
@property
def claim_id(self) -> Optional[str]:
return None if not self.stream_claim_info else self.stream_claim_info.claim_id
@property
def txid(self) -> Optional[str]:
return None if not self.stream_claim_info else self.stream_claim_info.txid
@property
def nout(self) -> Optional[int]:
return None if not self.stream_claim_info else self.stream_claim_info.nout
@property
def outpoint(self) -> Optional[str]:
return None if not self.stream_claim_info else self.stream_claim_info.outpoint
@property
def claim_height(self) -> Optional[int]:
return None if not self.stream_claim_info else self.stream_claim_info.height
@property
def channel_claim_id(self) -> Optional[str]:
return None if not self.stream_claim_info else self.stream_claim_info.channel_claim_id
@property
def channel_name(self) -> Optional[str]:
return None if not self.stream_claim_info else self.stream_claim_info.channel_name
@property
def claim_name(self) -> Optional[str]:
return None if not self.stream_claim_info else self.stream_claim_info.claim_name
@property
def metadata(self) -> Optional[typing.Dict]:
return None if not self.stream_claim_info else self.stream_claim_info.claim.stream.to_dict()
@property
def metadata_protobuf(self) -> bytes:
if self.stream_claim_info:
return binascii.hexlify(self.stream_claim_info.claim.to_bytes())
@property
def full_path(self) -> Optional[str]:
return os.path.join(self.download_directory, os.path.basename(self.file_name)) \
if self.file_name and self.download_directory else None
@property
def output_file_exists(self):
return os.path.isfile(self.full_path) if self.full_path else False
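Concrete sources elsewhere in the tree (`ManagedStream`, `TorrentSource`) supply the `NotImplementedError` methods; a minimal hypothetical subclass might look like:

```python
class NullSource(ManagedDownloadSource):
    """A do-nothing source, for illustration only."""

    async def start(self, timeout=None, save_now=False):
        self._status = self.STATUS_RUNNING

    async def stop(self, finished=False):
        self._status = self.STATUS_FINISHED if finished else self.STATUS_STOPPED

    async def save_file(self, file_name=None, download_directory=None):
        pass

    async def stop_tasks(self):
        pass

    @property
    def completed(self):
        return self.finished
```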

View file

@ -1,138 +0,0 @@
import os
import asyncio
import logging
import typing
from typing import Optional
from lbry.file.source import ManagedDownloadSource
if typing.TYPE_CHECKING:
from lbry.conf import Config
from lbry.extras.daemon.analytics import AnalyticsManager
from lbry.extras.daemon.storage import SQLiteStorage
log = logging.getLogger(__name__)
COMPARISON_OPERATORS = {
'eq': lambda a, b: a == b,
'ne': lambda a, b: a != b,
'g': lambda a, b: a > b,
'l': lambda a, b: a < b,
'ge': lambda a, b: a >= b,
'le': lambda a, b: a <= b,
}
class SourceManager:
filter_fields = {
'rowid',
'status',
'file_name',
'added_on',
'download_path',
'claim_name',
'claim_height',
'claim_id',
'outpoint',
'txid',
'nout',
'channel_claim_id',
'channel_name',
'completed'
}
set_filter_fields = {
"claim_ids": "claim_id",
"channel_claim_ids": "channel_claim_id",
"outpoints": "outpoint"
}
source_class = ManagedDownloadSource
def __init__(self, loop: asyncio.AbstractEventLoop, config: 'Config', storage: 'SQLiteStorage',
analytics_manager: Optional['AnalyticsManager'] = None):
self.loop = loop
self.config = config
self.storage = storage
self.analytics_manager = analytics_manager
self._sources: typing.Dict[str, ManagedDownloadSource] = {}
self.started = asyncio.Event()
def add(self, source: ManagedDownloadSource):
self._sources[source.identifier] = source
async def remove(self, source: ManagedDownloadSource):
if source.identifier not in self._sources:
return
self._sources.pop(source.identifier)
await source.stop_tasks()
async def initialize_from_database(self):
raise NotImplementedError()
async def start(self):
await self.initialize_from_database()
self.started.set()
async def stop(self):
while self._sources:
_, source = self._sources.popitem()
await source.stop_tasks()
self.started.clear()
async def create(self, file_path: str, key: Optional[bytes] = None,
iv_generator: Optional[typing.Generator[bytes, None, None]] = None) -> ManagedDownloadSource:
raise NotImplementedError()
async def delete(self, source: ManagedDownloadSource, delete_file: Optional[bool] = False):
await self.remove(source)
if delete_file and source.output_file_exists:
os.remove(source.full_path)
def get_filtered(self, sort_by: Optional[str] = None, reverse: Optional[bool] = False,
comparison: Optional[str] = None, **search_by) -> typing.List[ManagedDownloadSource]:
"""
Get a list of filtered and sorted ManagedStream objects
:param sort_by: field to sort by
:param reverse: reverse sorting
:param comparison: comparison operator used for filtering
:param search_by: fields and values to filter by
"""
if sort_by and sort_by not in self.filter_fields:
raise ValueError(f"'{sort_by}' is not a valid field to sort by")
if comparison and comparison not in COMPARISON_OPERATORS:
raise ValueError(f"'{comparison}' is not a valid comparison")
if 'full_status' in search_by:
del search_by['full_status']
for search in search_by:
if search not in self.filter_fields:
raise ValueError(f"'{search}' is not a valid search operation")
compare_sets = {}
if isinstance(search_by.get('claim_id'), list):
compare_sets['claim_ids'] = search_by.pop('claim_id')
if isinstance(search_by.get('outpoint'), list):
compare_sets['outpoints'] = search_by.pop('outpoint')
if isinstance(search_by.get('channel_claim_id'), list):
compare_sets['channel_claim_ids'] = search_by.pop('channel_claim_id')
if search_by or compare_sets:
comparison = comparison or 'eq'
streams = []
for stream in self._sources.values():
if compare_sets and not all(
getattr(stream, self.set_filter_fields[set_search]) in val
for set_search, val in compare_sets.items()):
continue
if search_by and not all(
COMPARISON_OPERATORS[comparison](getattr(stream, search), val)
for search, val in search_by.items()):
continue
streams.append(stream)
else:
streams = list(self._sources.values())
if sort_by:
streams.sort(key=lambda s: getattr(s, sort_by) or "")
if reverse:
streams.reverse()
return streams
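A few hypothetical calls showing the filter semantics (`manager` is assumed to be an initialized `SourceManager`, and the values are made up):

```python
# all finished downloads, newest first
finished = manager.get_filtered(status='finished', sort_by='added_on', reverse=True)

# sources added after a given timestamp, using the 'g' (greater than) operator
recent = manager.get_filtered(comparison='g', added_on=1600000000)

# passing a list for claim_id switches to set-membership filtering via set_filter_fields
mine = manager.get_filtered(claim_id=['first claim id', 'second claim id'])
```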

View file

@ -1,456 +0,0 @@
import asyncio
import json
import logging
import os
import pathlib
import platform
import re
import shlex
import shutil
import subprocess
from math import ceil
import lbry.utils
from lbry.conf import TranscodeConfig
log = logging.getLogger(__name__)
class VideoFileAnalyzer:
def _replace_or_pop_env(self, variable):
if variable + '_ORIG' in self._env_copy:
self._env_copy[variable] = self._env_copy[variable + '_ORIG']
else:
self._env_copy.pop(variable, None)
def __init__(self, conf: TranscodeConfig):
self._conf = conf
self._available_encoders = ""
self._ffmpeg_installed = None
self._which_ffmpeg = None
self._which_ffprobe = None
self._env_copy = dict(os.environ)
self._checked_ffmpeg = False
if lbry.utils.is_running_from_bundle():
# handle the situation where PyInstaller overrides our runtime environment:
self._replace_or_pop_env('LD_LIBRARY_PATH')
@staticmethod
def _execute(command, environment):
# log.debug("Executing: %s", command)
try:
with subprocess.Popen(
shlex.split(command) if platform.system() != 'Windows' else command,
stdout=subprocess.PIPE, stderr=subprocess.PIPE, env=environment
) as process:
(stdout, stderr) = process.communicate() # blocks until the process exits
return stdout.decode(errors='replace') + stderr.decode(errors='replace'), process.returncode
except subprocess.SubprocessError as e:
return str(e), -1
# This create_subprocess_exec call is broken in Windows Python 3.7, but it's prettier than what's here.
# The recommended fix is switching to ProactorEventLoop, but that breaks UDP in Linux Python 3.7.
# We work around that issue here by using run_in_executor. Check it again in Python 3.8.
async def _execute_ffmpeg(self, arguments):
arguments = self._which_ffmpeg + " " + arguments
return await asyncio.get_event_loop().run_in_executor(None, self._execute, arguments, self._env_copy)
async def _execute_ffprobe(self, arguments):
arguments = self._which_ffprobe + " " + arguments
return await asyncio.get_event_loop().run_in_executor(None, self._execute, arguments, self._env_copy)
async def _verify_executables(self):
try:
await self._execute_ffprobe("-version")
version, code = await self._execute_ffmpeg("-version")
except Exception as e:
code = -1
version = str(e)
if code != 0 or not version.startswith("ffmpeg"):
log.warning("Unable to run ffmpeg, but it was requested. Code: %d; Message: %s", code, version)
raise FileNotFoundError("Unable to locate or run ffmpeg or ffprobe. Please install FFmpeg "
"and ensure that it is callable via PATH or conf.ffmpeg_path")
log.debug("Using %s at %s", version.splitlines()[0].split(" Copyright")[0], self._which_ffmpeg)
return version
@staticmethod
def _which_ffmpeg_and_ffprobe(path):
return shutil.which("ffmpeg", path=path), shutil.which("ffprobe", path=path)
async def _verify_ffmpeg_installed(self):
if self._ffmpeg_installed:
return
self._ffmpeg_installed = False
path = self._conf.ffmpeg_path
if hasattr(self._conf, "data_dir"):
path += os.path.pathsep + os.path.join(getattr(self._conf, "data_dir"), "ffmpeg", "bin")
path += os.path.pathsep + self._env_copy.get("PATH", "")
self._which_ffmpeg, self._which_ffprobe = await asyncio.get_running_loop().run_in_executor(
None, self._which_ffmpeg_and_ffprobe, path
)
if not self._which_ffmpeg:
log.warning("Unable to locate ffmpeg executable. Path: %s", path)
raise FileNotFoundError(f"Unable to locate ffmpeg executable. Path: {path}")
if not self._which_ffprobe:
log.warning("Unable to locate ffprobe executable. Path: %s", path)
raise FileNotFoundError(f"Unable to locate ffprobe executable. Path: {path}")
if os.path.dirname(self._which_ffmpeg) != os.path.dirname(self._which_ffprobe):
log.warning("ffmpeg and ffprobe are in different folders!")
await self._verify_executables()
self._ffmpeg_installed = True
async def status(self, reset=False, recheck=False):
if reset:
self._available_encoders = ""
self._ffmpeg_installed = None
if self._checked_ffmpeg and not recheck:
pass
elif self._ffmpeg_installed is None:
try:
await self._verify_ffmpeg_installed()
except FileNotFoundError:
pass
self._checked_ffmpeg = True
return {
"available": self._ffmpeg_installed,
"which": self._which_ffmpeg,
"analyze_audio_volume": int(self._conf.volume_analysis_time) > 0
}
@staticmethod
def _verify_container(scan_data: dict):
container = scan_data["format"]["format_name"]
log.debug(" Detected container is %s", container)
splits = container.split(",")
if not {"webm", "mp4", "3gp", "ogg"}.intersection(splits):
return "Container format is not in the approved list of WebM, MP4. " \
f"Actual: {container} [{scan_data['format']['format_long_name']}]"
if "matroska" in splits:
for stream in scan_data["streams"]:
if stream["codec_type"] == "video":
codec = stream["codec_name"]
if not {"vp8", "vp9", "av1"}.intersection(codec.split(",")):
return "WebM format requires VP8/9 or AV1 video. " \
f"Actual: {codec} [{stream['codec_long_name']}]"
elif stream["codec_type"] == "audio":
codec = stream["codec_name"]
if not {"vorbis", "opus"}.intersection(codec.split(",")):
return "WebM format requires Vorbis or Opus audio. " \
f"Actual: {codec} [{stream['codec_long_name']}]"
return ""
@staticmethod
def _verify_video_encoding(scan_data: dict):
for stream in scan_data["streams"]:
if stream["codec_type"] != "video":
continue
codec = stream["codec_name"]
log.debug(" Detected video codec is %s, format is %s", codec, stream["pix_fmt"])
if not {"h264", "vp8", "vp9", "av1", "theora"}.intersection(codec.split(",")):
return "Video codec is not in the approved list of H264, VP8, VP9, AV1, Theora. " \
f"Actual: {codec} [{stream['codec_long_name']}]"
if "h264" in codec.split(",") and stream["pix_fmt"] != "yuv420p":
return "Video codec is H264, but its pixel format does not match the approved yuv420p. " \
f"Actual: {stream['pix_fmt']}"
return ""
def _verify_bitrate(self, scan_data: dict, file_path):
bit_rate_max = float(self._conf.video_bitrate_maximum)
if bit_rate_max <= 0:
return ""
if "bit_rate" in scan_data["format"]:
bit_rate = float(scan_data["format"]["bit_rate"])
else:
bit_rate = os.stat(file_path).st_size * 8 / float(scan_data["format"]["duration"])  # bytes -> bits
log.debug(" Detected bitrate is %s Mbps. Allowed max: %s Mbps",
str(bit_rate / 1000000.0), str(bit_rate_max / 1000000.0))
if bit_rate > bit_rate_max:
return "The bit rate is above the configured maximum. Actual: " \
f"{bit_rate / 1000000.0} Mbps; Allowed max: {bit_rate_max / 1000000.0} Mbps"
return ""
async def _verify_fast_start(self, scan_data: dict, video_file):
container = scan_data["format"]["format_name"]
if {"webm", "ogg"}.intersection(container.split(",")):
return ""
result, _ = await self._execute_ffprobe(f'-v debug "{video_file}"')
match = re.search(r"Before avformat_find_stream_info.+?\s+seeks:(\d+)\s+", result)
if match and int(match.group(1)) != 0:
return "Video stream descriptors are not at the start of the file (the faststart flag was not used)."
return ""
@staticmethod
def _verify_audio_encoding(scan_data: dict):
for stream in scan_data["streams"]:
if stream["codec_type"] != "audio":
continue
codec = stream["codec_name"]
log.debug(" Detected audio codec is %s", codec)
if not {"aac", "mp3", "flac", "vorbis", "opus"}.intersection(codec.split(",")):
return "Audio codec is not in the approved list of AAC, FLAC, MP3, Vorbis, and Opus. " \
f"Actual: {codec} [{stream['codec_long_name']}]"
if int(stream['sample_rate']) > 48000:
return "Sample rate out of range"
return ""
async def _verify_audio_volume(self, seconds, video_file):
try:
validate_volume = int(seconds) > 0
except ValueError:
validate_volume = False
if not validate_volume:
return ""
result, _ = await self._execute_ffmpeg(f'-i "{video_file}" -t {seconds} '
f'-af volumedetect -vn -sn -dn -f null "{os.devnull}"')
try:
mean_volume = float(re.search(r"mean_volume:\s+([-+]?\d*\.\d+|\d+)", result).group(1))
max_volume = float(re.search(r"max_volume:\s+([-+]?\d*\.\d+|\d+)", result).group(1))
except Exception as e:
log.debug(" Failure in volume analysis. Message: %s", str(e))
return ""
if max_volume < -5.0 and mean_volume < -22.0:
return "Audio is at least five dB lower than prime. " \
f"Actual max: {max_volume}, mean: {mean_volume}"
log.debug(" Detected audio volume has mean, max of %f, %f dB", mean_volume, max_volume)
return ""
@staticmethod
def _compute_crf(scan_data):
height = 240.0
for stream in scan_data["streams"]:
if stream["codec_type"] == "video":
height = max(height, float(stream["height"]))
# https://developers.google.com/media/vp9/settings/vod/
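# worked example (added note, not in the original): a 1080p stream gives
# int(-0.011 * 1080 + 40) = int(28.12) = 28; the 240-line floor gives 37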
return int(-0.011 * height + 40)
def _get_video_scaler(self):
return self._conf.video_scaler
async def _get_video_encoder(self, scan_data):
# use what the user said if it's there:
# if it's not there, use h264 if we can because it's way faster than the others
# if we don't have h264 use vp9; it's fairly compatible even though it's slow
if not self._available_encoders:
self._available_encoders, _ = await self._execute_ffmpeg("-encoders -v quiet")
encoder = self._conf.video_encoder.split(" ", 1)[0]
if re.search(fr"^\s*V..... {encoder} ", self._available_encoders, re.MULTILINE):
return self._conf.video_encoder
if re.search(r"^\s*V..... libx264 ", self._available_encoders, re.MULTILINE):
if encoder:
log.warning(" Using libx264 since the requested encoder was unavailable. Requested: %s", encoder)
return 'libx264 -crf 19 -vf "format=yuv420p"'
if not encoder:
encoder = "libx264"
if re.search(r"^\s*V..... libvpx-vp9 ", self._available_encoders, re.MULTILINE):
log.warning(" Using libvpx-vp9 since the requested encoder was unavailable. Requested: %s", encoder)
crf = self._compute_crf(scan_data)
return f"libvpx-vp9 -crf {crf} -b:v 0"
if re.search(r"^\s*V..... libtheora", self._available_encoders, re.MULTILINE):
log.warning(" Using libtheora since the requested encoder was unavailable. Requested: %s", encoder)
return "libtheora -q:v 7"
raise Exception(f"The video encoder is not available. Requested: {encoder}")
async def _get_audio_encoder(self, extension):
# if the video encoding is theora or av1/vp8/vp9 use opus (or fallback to vorbis)
# or we don't have a video encoding but we have an ogg or webm container use opus
# if we need to use opus/vorbis see if the conf file has it else use our own params
# else use the user-set value if it exists
# else use aac
wants_opus = extension != "mp4"
if not self._available_encoders:
self._available_encoders, _ = await self._execute_ffmpeg("-encoders -v quiet")
encoder = self._conf.audio_encoder.split(" ", 1)[0]
if wants_opus and 'opus' in encoder:
return self._conf.audio_encoder
if wants_opus and re.search(r"^\s*A..... libopus ", self._available_encoders, re.MULTILINE):
return "libopus -b:a 160k"
if wants_opus and 'vorbis' in encoder:
return self._conf.audio_encoder
if wants_opus and re.search(r"^\s*A..... libvorbis ", self._available_encoders, re.MULTILINE):
return "libvorbis -q:a 6"
if re.search(fr"^\s*A..... {encoder} ", self._available_encoders, re.MULTILINE):
return self._conf.audio_encoder
if re.search(r"^\s*A..... aac ", self._available_encoders, re.MULTILINE):
return "aac -b:a 192k"
raise Exception(f"The audio encoder is not available. Requested: {encoder or 'aac'}")
@staticmethod
def _get_best_container_extension(scan_data, video_encoder):
# the container is chosen by the video format
# if we are theora-encoded, we want ogg
# if we are vp8/vp9/av1 we want webm
# use mp4 for anything else
if video_encoder:  # re-encoding video: container follows the target encoder
if "theora" in video_encoder:
return "ogv"
if re.search(r"vp[89x]|av1", video_encoder.split(" ", 1)[0]):
return "webm"
return "mp4"
for stream in scan_data["streams"]:
if stream["codec_type"] != "video":
continue
codec = stream["codec_name"].split(",")
if "theora" in codec:
return "ogv"
if {"vp8", "vp9", "av1"}.intersection(codec):
return "webm"
return "mp4"
async def _get_scan_data(self, validate, file_path):
arguments = f'-v quiet -print_format json -show_format -show_streams "{file_path}"'
result, _ = await self._execute_ffprobe(arguments)
try:
scan_data = json.loads(result)
except Exception as e:
log.debug("Failure in JSON parsing ffprobe results. Message: %s", str(e))
raise ValueError(f'Absent or unreadable video file: {file_path}')
if "format" not in scan_data or "duration" not in scan_data["format"]:
log.debug("Format data is missing from ffprobe results for: %s", file_path)
raise ValueError(f'Media file does not appear to contain video content: {file_path}')
if float(scan_data["format"]["duration"]) < 0.1:
log.debug("Media file appears to be an image: %s", file_path)
raise ValueError(f'Assuming image file at: {file_path}')
return scan_data
@staticmethod
def _build_spec(scan_data):
assert scan_data
duration = ceil(float(scan_data["format"]["duration"])) # existence verified when scan_data made
width = -1
height = -1
for stream in scan_data["streams"]:
if stream["codec_type"] != "video":
continue
width = max(width, int(stream["width"]))
height = max(height, int(stream["height"]))
log.debug(" Detected duration: %d sec. with resolution: %d x %d", duration, width, height)
spec = {"duration": duration}
if height >= 0:
spec["height"] = height
if width >= 0:
spec["width"] = width
return spec
async def verify_or_repair(self, validate, repair, file_path, ignore_non_video=False):
if not validate and not repair:
return file_path, {}
if ignore_non_video and not file_path:
return file_path, {}
await self._verify_ffmpeg_installed()
try:
scan_data = await self._get_scan_data(validate, file_path)
except ValueError:
if ignore_non_video:
return file_path, {}
raise
fast_start_msg = await self._verify_fast_start(scan_data, file_path)
log.debug("Analyzing %s:", file_path)
spec = self._build_spec(scan_data)
log.debug(" Detected faststart is %s", "false" if fast_start_msg else "true")
container_msg = self._verify_container(scan_data)
bitrate_msg = self._verify_bitrate(scan_data, file_path)
video_msg = self._verify_video_encoding(scan_data)
audio_msg = self._verify_audio_encoding(scan_data)
volume_msg = await self._verify_audio_volume(self._conf.volume_analysis_time, file_path)
messages = [container_msg, bitrate_msg, fast_start_msg, video_msg, audio_msg, volume_msg]
if not any(messages):
return file_path, spec
if not repair:
errors = ["Streamability verification failed:"]
errors.extend(filter(None, messages))
raise Exception("\n ".join(errors))
# the plan for transcoding:
# we have to re-encode the video if it is in a nonstandard format
# we also re-encode if we are h264 but not yuv420p (both errors caught in video_msg)
# we also re-encode if our bitrate or sample rate is too high
try:
transcode_command = [f'-i "{file_path}" -y -c:s copy -c:d copy -c:v']
video_encoder = ""
if video_msg or bitrate_msg:
video_encoder = await self._get_video_encoder(scan_data)
transcode_command.append(video_encoder)
# could do the scaling only if bitrate_msg, but if we're going to the effort to re-encode anyway...
transcode_command.append(self._get_video_scaler())
else:
transcode_command.append("copy")
transcode_command.append("-movflags +faststart -c:a")
extension = self._get_best_container_extension(scan_data, video_encoder)
if audio_msg or volume_msg:
audio_encoder = await self._get_audio_encoder(extension)
transcode_command.append(audio_encoder)
if volume_msg and self._conf.volume_filter:
transcode_command.append(self._conf.volume_filter)
if audio_msg == "Sample rate out of range":
transcode_command.append(" -ar 48000 ")
else:
transcode_command.append("copy")
# TODO: put it in a temp folder and delete it after we upload?
path = pathlib.Path(file_path)
output = path.parent / f"{path.stem}_fixed.{extension}"
transcode_command.append(f'"{output}"')
ffmpeg_command = " ".join(transcode_command)
log.info("Proceeding on transcode via: ffmpeg %s", ffmpeg_command)
result, code = await self._execute_ffmpeg(ffmpeg_command)
if code != 0:
raise Exception(f"Failure to complete the transcode command. Output: {result}")
except Exception as e:
if validate:
raise
log.info("Unable to transcode %s . Message: %s", file_path, str(e))
# TODO: delete partial output file here if it exists?
return file_path, spec
return str(output), spec
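# End-to-end usage sketch (illustrative only; assumes ffmpeg/ffprobe are on PATH
# and that TranscodeConfig() constructs with defaults):
#
#     import asyncio
#     from lbry.conf import TranscodeConfig
#
#     async def main():
#         analyzer = VideoFileAnalyzer(TranscodeConfig())
#         # repair=True transcodes a failing file to "<stem>_fixed.<extension>"
#         path, spec = await analyzer.verify_or_repair(
#             validate=False, repair=True, file_path="/tmp/input.mkv")
#         print(path, spec)   # e.g. "/tmp/input_fixed.mp4", {"duration": ..., "width": ..., "height": ...}
#
#     asyncio.run(main())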

[12 binary image files changed; before/after sizes unchanged: 7.4 KiB, 18 KiB, 1.2 KiB, 2.6 KiB, 6.1 KiB, 97 KiB, 1.1 KiB, 361 KiB, 5.3 KiB, 15 KiB, 31 KiB, 2.6 KiB]

lbry/lbry/__init__.py Normal file
View file

@ -0,0 +1,4 @@
__name__ = "lbry"
__version__ = "0.40.0"
version = tuple(__version__.split('.'))
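# e.g. ('0', '40', '0') -- note the components remain strings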

Some files were not shown because too many files have changed in this diff.