initial import
Commit ece2db08da
44 changed files with 14042 additions and 0 deletions
.gitignore (vendored, new file, 108 lines)
@@ -0,0 +1,108 @@
# Byte-compiled / optimized / DLL files
__pycache__/
*.py[cod]
*$py.class

# C extensions
*.so

# Distribution / packaging
.Python
env/
build/
develop-eggs/
dist/
downloads/
eggs/
.eggs/
lib/
lib64/
parts/
sdist/
var/
wheels/
*.egg-info/
.installed.cfg
*.egg

# PyInstaller
# Usually these files are written by a python script from a template
# before PyInstaller builds the exe, so as to inject date/other infos into it.
*.manifest
*.spec

# Installer logs
pip-log.txt
pip-delete-this-directory.txt

# Unit test / coverage reports
htmlcov/
.tox/
.coverage
.coverage.*
.cache
nosetests.xml
coverage.xml
*.cover
.hypothesis/

# Translations
*.mo
*.pot

# Django stuff:
*.log
local_settings.py

# Flask stuff:
instance/
.webassets-cache

# Scrapy stuff:
.scrapy

# Sphinx documentation
docs/_build/

# PyBuilder
target/

# Jupyter Notebook
.ipynb_checkpoints

# pyenv
.python-version

# celery beat schedule file
celerybeat-schedule

# SageMath parsed files
*.sage.py

# dotenv
.env

# virtualenv
.venv
venv/
ENV/

# Spyder project settings
.spyderproject
.spyproject

# Rope project settings
.ropeproject

# mkdocs documentation
/site

# mypy
.mypy_cache/


# pycharm configuration
.idea/

bin/
data/
.travis.yml (new file, 19 lines)
@@ -0,0 +1,19 @@
sudo: false
language: python

python:
  - "2.7"
  - "3.6"

install: pip install tox-travis coverage

script:
  - tox

after_success:
  - coverage combine tests/
  - bash <(curl -s https://codecov.io/bash)

branches:
  only:
    - master
CHANGELOG.md (new file, empty)
LICENSE (new file, 21 lines)
@@ -0,0 +1,21 @@
MIT License

Copyright (c) 2018 LBRY Inc.

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
MANIFEST.in (new file, 4 lines)
@@ -0,0 +1,4 @@
include README.md
include CHANGELOG.md
include LICENSE
recursive-include torba *.txt *.py
README.md (new file, empty)
setup.cfg (new file, 7 lines)
@@ -0,0 +1,7 @@
[coverage:run]
branch = True

[coverage:paths]
source =
    torba
    .tox/*/lib/python*/site-packages/torba
setup.py (new file, 45 lines)
@@ -0,0 +1,45 @@
import os
import re
from setuptools import setup, find_packages

# read the package version out of torba/__init__.py
init_file = open(os.path.join(os.path.dirname(__file__), 'torba', '__init__.py')).read()
version = re.search(r'\d+\.\d+\.\d+', init_file).group()

setup(
    name='torba',
    version=version,
    url='https://github.com/lbryio/torba',
    license='MIT',
    author='LBRY Inc.',
    author_email='hello@lbry.io',
    description='Wallet library for bitcoin based currencies.',
    keywords='wallet,crypto,currency,money,bitcoin,lbry',
    classifiers=(
        'Framework :: Twisted',
        'Intended Audience :: Developers',
        'Intended Audience :: System Administrators',
        'License :: OSI Approved :: MIT License',
        'Programming Language :: Python :: 2.7',
        'Programming Language :: Python :: 3',
        'Operating System :: OS Independent',
        'Topic :: Internet',
        'Topic :: Software Development :: Libraries :: Python Modules',
        'Topic :: System :: Distributed Computing',
        'Topic :: Utilities',
    ),
    packages=find_packages(exclude=('tests',)),
    include_package_data=True,
    python_requires='>=2.7,>=3.6',
    install_requires=(
        'twisted',
        'ecdsa',
        'pbkdf2',
        'cryptography',
        'typing'
    ),
    extras_require={
        'test': (
            'mock',
        )
    }
)
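A quick self-contained check of the version-extraction approach above: the regular expression simply grabs the first X.Y.Z triple found in torba/__init__.py, which this commit sets to '0.0.1'.

import re

init_file = "__version__ = '0.0.1'"   # the relevant line from torba/__init__.py below
assert re.search(r'\d+\.\d+\.\d+', init_file).group() == '0.0.1'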
tests/unit/__init__.py (new file, empty)
tests/unit/ftc.py (new file, 43 lines)
@@ -0,0 +1,43 @@
from six import int2byte
from binascii import unhexlify
from torba.baseledger import BaseLedger
from torba.basenetwork import BaseNetwork
from torba.basescript import BaseInputScript, BaseOutputScript
from torba.basetransaction import BaseTransaction, BaseInput, BaseOutput
from torba.basecoin import BaseCoin


class Ledger(BaseLedger):
    network_class = BaseNetwork


class Input(BaseInput):
    script_class = BaseInputScript


class Output(BaseOutput):
    script_class = BaseOutputScript


class Transaction(BaseTransaction):
    input_class = Input
    output_class = Output


class FTC(BaseCoin):
    name = 'Fakecoin'
    symbol = 'FTC'
    network = 'mainnet'

    ledger_class = Ledger
    transaction_class = Transaction

    pubkey_address_prefix = int2byte(0x00)
    script_address_prefix = int2byte(0x05)
    extended_public_key_prefix = unhexlify('0488b21e')
    extended_private_key_prefix = unhexlify('0488ade4')

    default_fee_per_byte = 50

    def __init__(self, ledger, fee_per_byte=default_fee_per_byte):
        super(FTC, self).__init__(ledger, fee_per_byte)
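Defining the FTC subclass above is all the registration this fake coin needs: the CoinRegistry metaclass in torba.basecoin records it under its id and wires the ledger back-reference when the class is created. A small hedged usage sketch, mirroring how the wallet tests look the coin up:

from torba.basecoin import CoinRegistry
from torba.manager import WalletManager

assert FTC.get_id() == 'ftc_mainnet'                       # '{symbol}_{network}', lower-cased
assert CoinRegistry.get_coin_class('ftc_mainnet') is FTC   # registered by the metaclass

ledger = WalletManager().get_or_create_ledger(FTC.get_id())
ftc = FTC(ledger)                                          # fee_per_byte defaults to 50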
tests/unit/test_account.py (new file, 105 lines)
@@ -0,0 +1,105 @@
from binascii import hexlify
from twisted.trial import unittest

from torba.coin.btc import BTC
from torba.manager import WalletManager
from torba.wallet import Account


class TestAccount(unittest.TestCase):

    def setUp(self):
        ledger = WalletManager().get_or_create_ledger(BTC.get_id())
        self.coin = BTC(ledger)

    def test_generate_account(self):
        account = Account.generate(self.coin, u"torba")
        self.assertEqual(account.coin, self.coin)
        self.assertIsNotNone(account.seed)
        self.assertEqual(account.public_key.coin, self.coin)
        self.assertEqual(account.private_key.public_key, account.public_key)

        self.assertEqual(len(account.receiving_keys.child_keys), 0)
        self.assertEqual(len(account.receiving_keys.addresses), 0)
        self.assertEqual(len(account.change_keys.child_keys), 0)
        self.assertEqual(len(account.change_keys.addresses), 0)

        account.ensure_enough_addresses()
        self.assertEqual(len(account.receiving_keys.child_keys), 20)
        self.assertEqual(len(account.receiving_keys.addresses), 20)
        self.assertEqual(len(account.change_keys.child_keys), 6)
        self.assertEqual(len(account.change_keys.addresses), 6)

    def test_generate_account_from_seed(self):
        account = Account.from_seed(
            self.coin,
            u"carbon smart garage balance margin twelve chest sword toast envelope bottom stomach ab"
            u"sent",
            u"torba"
        )
        self.assertEqual(
            account.private_key.extended_key_string(),
            'xprv9s21ZrQH143K2dyhK7SevfRG72bYDRNv25yKPWWm6dqApNxm1Zb1m5gGcBWYfbsPjTr2v5joit8Af2Zp5P'
            '6yz3jMbycrLrRMpeAJxR8qDg8'
        )
        self.assertEqual(
            account.public_key.extended_key_string(),
            'xpub661MyMwAqRbcF84AR8yfHoMzf4S2ct6mPJtvBtvNeyN9hBHuZ6uGJszkTSn5fQUCdz3XU17eBzFeAUwV6f'
            'iW44g14WF52fYC5J483wqQ5ZP'
        )
        self.assertEqual(
            account.receiving_keys.generate_next_address(),
            '1PmX9T3sCiDysNtWszJa44SkKcpGc2NaXP'
        )
        private_key = account.get_private_key_for_address('1PmX9T3sCiDysNtWszJa44SkKcpGc2NaXP')
        self.assertEqual(
            private_key.extended_key_string(),
            'xprv9xNEfQ296VTRaEUDZ8oKq74xw2U6kpj486vFUB4K1wT9U25GX4UwuzFgJN1YuRrqkQ5TTwCpkYnjNpSoHS'
            'BaEigNHPkoeYbuPMRo6mRUjxg'
        )
        self.assertIsNone(account.get_private_key_for_address('BcQjRlhDOIrQez1WHfz3whnB33Bp34sUgX'))

        self.assertEqual(
            hexlify(private_key.wif()),
            b'1cc27be89ad47ef932562af80e95085eb0ab2ae3e5c019b1369b8b05ff2e94512f01'
        )

    def test_load_and_save_account(self):
        account_data = {
            'seed':
                "carbon smart garage balance margin twelve chest sword toast envelope bottom stomac"
                "h absent",
            'encrypted': False,
            'private_key':
                'xprv9s21ZrQH143K2dyhK7SevfRG72bYDRNv25yKPWWm6dqApNxm1Zb1m5gGcBWYfbsPjTr2v5joit8Af2Zp5P'
                '6yz3jMbycrLrRMpeAJxR8qDg8',
            'public_key':
                'xpub661MyMwAqRbcF84AR8yfHoMzf4S2ct6mPJtvBtvNeyN9hBHuZ6uGJszkTSn5fQUCdz3XU17eBzFeAUwV6f'
                'iW44g14WF52fYC5J483wqQ5ZP',
            'receiving_gap': 10,
            'receiving_keys': [
                '0222345947a59dca4a3363ffa81ac87dd907d2b2feff57383eaeddbab266ca5f2d',
                '03fdc9826d5d00a484188cba8eb7dba5877c0323acb77905b7bcbbab35d94be9f6'
            ],
            'change_gap': 10,
            'change_keys': [
                '038836be4147836ed6b4df6a89e0d9f1b1c11cec529b7ff5407de57f2e5b032c83'
            ]
        }

        account = Account.from_dict(self.coin, account_data)

        self.assertEqual(len(account.receiving_keys.addresses), 2)
        self.assertEqual(
            account.receiving_keys.addresses[0],
            '1PmX9T3sCiDysNtWszJa44SkKcpGc2NaXP'
        )
        self.assertEqual(len(account.change_keys.addresses), 1)
        self.assertEqual(
            account.change_keys.addresses[0],
            '1PUbu1D1f3c244JPRSJKBCxRqui5NT6geR'
        )

        self.maxDiff = None
        account_data['coin'] = 'btc_mainnet'
        self.assertDictEqual(account_data, account.to_dict())
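The dictionary in test_load_and_save_account is the schema an Account round-trips through. A minimal hedged sketch of that save/load cycle using the same API the test exercises:

from torba.coin.btc import BTC
from torba.manager import WalletManager
from torba.wallet import Account

coin = BTC(WalletManager().get_or_create_ledger(BTC.get_id()))
account = Account.generate(coin, u'torba')    # fresh random seed
data = account.to_dict()                      # 'coin', 'seed', keys and gap settings, as asserted above
restored = Account.from_dict(coin, data)
assert restored.public_key.extended_key_string() == account.public_key.extended_key_string()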
tests/unit/test_coinselection.py (new file, 157 lines)
@@ -0,0 +1,157 @@
import unittest

from torba.coin.btc import BTC
from torba.coinselection import CoinSelector, MAXIMUM_TRIES
from torba.constants import CENT
from torba.manager import WalletManager

from .test_transaction import Output, get_output as utxo


NULL_HASH = b'\x00'*32


def search(*args, **kwargs):
    selection = CoinSelector(*args, **kwargs).branch_and_bound()
    return [o.output.amount for o in selection] if selection else selection


class BaseSelectionTestCase(unittest.TestCase):

    def setUp(self):
        ledger = WalletManager().get_or_create_ledger(BTC.get_id())
        self.coin = BTC(ledger)

    def estimates(self, *args):
        txos = args if isinstance(args[0], Output) else args[0]
        return [txo.get_estimator(self.coin) for txo in txos]


class TestCoinSelectionTests(BaseSelectionTestCase):

    def test_empty_coins(self):
        self.assertIsNone(CoinSelector([], 0, 0).select())

    def test_skip_binary_search_if_total_not_enough(self):
        fee = utxo(CENT).get_estimator(self.coin).fee
        big_pool = self.estimates(utxo(CENT+fee) for _ in range(100))
        selector = CoinSelector(big_pool, 101 * CENT, 0)
        self.assertIsNone(selector.select())
        self.assertEqual(selector.tries, 0)  # Never tried.
        # check happy path
        selector = CoinSelector(big_pool, 100 * CENT, 0)
        self.assertEqual(len(selector.select()), 100)
        self.assertEqual(selector.tries, 201)

    def test_exact_match(self):
        fee = utxo(CENT).get_estimator(self.coin).fee
        utxo_pool = self.estimates(
            utxo(CENT + fee),
            utxo(CENT),
            utxo(CENT - fee)
        )
        selector = CoinSelector(utxo_pool, CENT, 0)
        match = selector.select()
        self.assertEqual([CENT + fee], [c.output.amount for c in match])
        self.assertTrue(selector.exact_match)

    def test_random_draw(self):
        utxo_pool = self.estimates(
            utxo(2 * CENT),
            utxo(3 * CENT),
            utxo(4 * CENT)
        )
        selector = CoinSelector(utxo_pool, CENT, 0, '\x00')
        match = selector.select()
        self.assertEqual([2 * CENT], [c.output.amount for c in match])
        self.assertFalse(selector.exact_match)


class TestOfficialBitcoinCoinSelectionTests(BaseSelectionTestCase):

    # Bitcoin implementation:
    # https://github.com/bitcoin/bitcoin/blob/master/src/wallet/coinselection.cpp
    #
    # Bitcoin implementation tests:
    # https://github.com/bitcoin/bitcoin/blob/master/src/wallet/test/coinselector_tests.cpp
    #
    # Branch and Bound coin selection white paper:
    # https://murch.one/wp-content/uploads/2016/11/erhardt2016coinselection.pdf

    def setUp(self):
        ledger = WalletManager().get_or_create_ledger(BTC.get_id())
        self.coin = BTC(ledger, 0)

    def make_hard_case(self, utxos):
        target = 0
        utxo_pool = []
        for i in range(utxos):
            amount = 1 << (utxos+i)
            target += amount
            utxo_pool.append(utxo(amount))
            utxo_pool.append(utxo(amount + (1 << (utxos-1-i))))
        return self.estimates(utxo_pool), target

    def test_branch_and_bound_coin_selection(self):
        utxo_pool = self.estimates(
            utxo(1 * CENT),
            utxo(2 * CENT),
            utxo(3 * CENT),
            utxo(4 * CENT)
        )

        # Select 1 Cent
        self.assertEqual([1 * CENT], search(utxo_pool, 1 * CENT, 0.5 * CENT))

        # Select 2 Cent
        self.assertEqual([2 * CENT], search(utxo_pool, 2 * CENT, 0.5 * CENT))

        # Select 5 Cent
        self.assertEqual([3 * CENT, 2 * CENT], search(utxo_pool, 5 * CENT, 0.5 * CENT))

        # Select 11 Cent, not possible
        self.assertIsNone(search(utxo_pool, 11 * CENT, 0.5 * CENT))

        # Select 10 Cent
        utxo_pool += self.estimates(utxo(5 * CENT))
        self.assertEqual(
            [4 * CENT, 3 * CENT, 2 * CENT, 1 * CENT],
            search(utxo_pool, 10 * CENT, 0.5 * CENT)
        )

        # Negative effective value
        # Select 10 Cent but have 1 Cent not be possible because too small
        # TODO: bitcoin has [5, 3, 2]
        self.assertEqual(
            [4 * CENT, 3 * CENT, 2 * CENT, 1 * CENT],
            search(utxo_pool, 10 * CENT, 5000)
        )

        # Select 0.25 Cent, not possible
        self.assertIsNone(search(utxo_pool, 0.25 * CENT, 0.5 * CENT))

        # Iteration exhaustion test
        utxo_pool, target = self.make_hard_case(17)
        selector = CoinSelector(utxo_pool, target, 0)
        self.assertIsNone(selector.branch_and_bound())
        self.assertEqual(selector.tries, MAXIMUM_TRIES)  # Should exhaust
        utxo_pool, target = self.make_hard_case(14)
        self.assertIsNotNone(search(utxo_pool, target, 0))  # Should not exhaust

        # Test same value early bailout optimization
        utxo_pool = self.estimates([
            utxo(7 * CENT),
            utxo(7 * CENT),
            utxo(7 * CENT),
            utxo(7 * CENT),
            utxo(2 * CENT)
        ] + [utxo(5 * CENT)]*50000)
        self.assertEqual(
            [7 * CENT, 7 * CENT, 7 * CENT, 7 * CENT, 2 * CENT],
            search(utxo_pool, 30 * CENT, 5000)
        )

        # Select 1 Cent with pool of only greater than 5 Cent
        utxo_pool = self.estimates(utxo(i * CENT) for i in range(5, 21))
        for _ in range(100):
            self.assertIsNone(search(utxo_pool, 1 * CENT, 2 * CENT))
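The branch-and-bound search these tests exercise (Erhardt's white paper, linked above) looks for a set of coins whose total lands between the target and the target plus the cost of creating a change output, so no change is needed. A standalone hedged sketch of the idea, not torba's CoinSelector internals (which are not part of this commit):

def branch_and_bound(amounts, target, cost_of_change):
    # Return a subset of `amounts` summing into [target, target + cost_of_change], or None.
    amounts = sorted(amounts, reverse=True)
    remaining = [0] * (len(amounts) + 1)
    for i in range(len(amounts) - 1, -1, -1):
        remaining[i] = remaining[i + 1] + amounts[i]

    def recurse(i, chosen, total):
        if total > target + cost_of_change:             # overshot the window: prune this branch
            return None
        if total >= target:                             # landed inside the window: success
            return list(chosen)
        if i == len(amounts) or total + remaining[i] < target:
            return None                                 # not enough value left: prune
        chosen.append(amounts[i])                       # branch 1: include amounts[i]
        found = recurse(i + 1, chosen, total + amounts[i])
        chosen.pop()
        if found is not None:
            return found
        return recurse(i + 1, chosen, total)            # branch 2: exclude amounts[i]

    return recurse(0, [], 0)

For example, branch_and_bound([1, 2, 3, 4], 5, 0) returns [4, 1]. Torba's selector additionally caps the search at MAXIMUM_TRIES and, as test_random_draw suggests, can fall back to a seeded random draw when no exact match exists.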
tests/unit/test_hash.py (new file, 21 lines)
@@ -0,0 +1,21 @@
import mock
from unittest import TestCase
from torba.hash import aes_decrypt, aes_encrypt


class TestAESEncryptDecrypt(TestCase):

    @mock.patch('os.urandom', side_effect=lambda i: b'f'*i)
    def test_encrypt(self, _):
        self.assertEqual(aes_encrypt(
            b'bubblegum', b'The Times 03/Jan/2009 Chancellor on brink of second bailout for banks'),
            b'OWsqm2goP4wXAPFyDde0IP2rPxRaESGr9NUlPn4y2nZrywQJo7pZCPt9ixYa7Ye9tzSpirF03Qd5OyI75xlGjd'
            b'4khKCvcX6tcViLmhIGUPY='
        )

    def test_decrypt(self):
        self.assertEqual(aes_decrypt(
            b'bubblegum', b'WeW99mQgRExAEzPjJOAC/MdTJaHgz3hT+kazFbvVQqF/KFva48ulVMOewU7JWD0ufWJIxtAIQ'
            b'bGtlbvbq5w74bsCCJLrtNTHBhenkms8XccJXTr/UF/ZYTF1Prz8b0AQ'),
            b'The Times 03/Jan/2009 Chancellor on brink of second bailout for banks'
        )
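The two expected values above imply that aes_encrypt and aes_decrypt are inverses for a given password, with the IV coming from os.urandom (mocked in the first test). A hedged usage sketch, assuming the (password, data) bytes signature the tests use:

from torba.hash import aes_decrypt, aes_encrypt

secret = b'bubblegum'
message = b'The Times 03/Jan/2009 Chancellor on brink of second bailout for banks'
token = aes_encrypt(secret, message)          # base64-style bytes, like the expected values above
assert aes_decrypt(secret, token) == message  # expected round trip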
tests/unit/test_ledger.py (new file, empty)
tests/unit/test_mnemonic.py (new file, 23 lines)
@@ -0,0 +1,23 @@
import unittest
from binascii import hexlify

from torba.mnemonic import Mnemonic


class TestMnemonic(unittest.TestCase):

    def test_mnemonic_to_seed(self):
        seed = Mnemonic.mnemonic_to_seed(mnemonic=u'foobar', passphrase=u'torba')
        self.assertEqual(
            hexlify(seed),
            b'475a419db4e991cab14f08bde2d357e52b3e7241f72c6d8a2f92782367feeee9f403dc6a37c26a3f02ab9'
            b'dec7f5063161eb139cea00da64cd77fba2f07c49ddc'
        )

    def test_make_seed_decode_encode(self):
        iters = 10
        m = Mnemonic('en')
        for _ in range(iters):
            seed = m.make_seed()
            i = m.mnemonic_decode(seed)
            self.assertEqual(m.mnemonic_encode(i), seed)
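Mnemonic.mnemonic_to_seed is the usual key-stretching step that turns a mnemonic phrase plus passphrase into key material. A hedged sketch of the standard Electrum/BIP39-style construction (PBKDF2-HMAC-SHA512, 2048 rounds); the exact salt prefix and normalization torba.mnemonic uses are not shown in this commit, so treat those details as assumptions:

import hashlib
import unicodedata

def mnemonic_to_seed(mnemonic, passphrase, salt_prefix=u'electrum'):
    # salt_prefix is hypothetical here; Electrum uses 'electrum', BIP39 uses 'mnemonic'
    mnemonic = unicodedata.normalize('NFKD', u' '.join(mnemonic.split()))
    salt = unicodedata.normalize('NFKD', salt_prefix + passphrase)
    return hashlib.pbkdf2_hmac('sha512', mnemonic.encode('utf-8'), salt.encode('utf-8'), 2048)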
tests/unit/test_script.py (new file, 218 lines)
@@ -0,0 +1,218 @@
from binascii import hexlify, unhexlify
from twisted.trial import unittest

from torba.bcd_data_stream import BCDataStream
from torba.basescript import Template, ParseError, tokenize, push_data
from torba.basescript import PUSH_SINGLE, PUSH_MANY, OP_HASH160, OP_EQUAL
from torba.basescript import BaseInputScript, BaseOutputScript


def parse(opcodes, source):
    template = Template('test', opcodes)
    s = BCDataStream()
    for t in source:
        if isinstance(t, bytes):
            s.write_many(push_data(t))
        elif isinstance(t, int):
            s.write_uint8(t)
        else:
            raise ValueError()
    s.reset()
    return template.parse(tokenize(s))


class TestScriptTemplates(unittest.TestCase):

    def test_push_data(self):
        self.assertEqual(parse(
            (PUSH_SINGLE('script_hash'),),
            (b'abcdef',)
        ), {
            'script_hash': b'abcdef'
        })
        self.assertEqual(parse(
            (PUSH_SINGLE('first'), PUSH_SINGLE('last')),
            (b'Satoshi', b'Nakamoto')
        ), {
            'first': b'Satoshi',
            'last': b'Nakamoto'
        })
        self.assertEqual(parse(
            (OP_HASH160, PUSH_SINGLE('script_hash'), OP_EQUAL),
            (OP_HASH160, b'abcdef', OP_EQUAL)
        ), {
            'script_hash': b'abcdef'
        })

    def test_push_data_many(self):
        self.assertEqual(parse(
            (PUSH_MANY('names'),),
            (b'amit',)
        ), {
            'names': [b'amit']
        })
        self.assertEqual(parse(
            (PUSH_MANY('names'),),
            (b'jeremy', b'amit', b'victor')
        ), {
            'names': [b'jeremy', b'amit', b'victor']
        })
        self.assertEqual(parse(
            (OP_HASH160, PUSH_MANY('names'), OP_EQUAL),
            (OP_HASH160, b'grin', b'jack', OP_EQUAL)
        ), {
            'names': [b'grin', b'jack']
        })

    def test_push_data_mixed(self):
        self.assertEqual(parse(
            (PUSH_SINGLE('CEO'), PUSH_MANY('Devs'), PUSH_SINGLE('CTO'), PUSH_SINGLE('State')),
            (b'jeremy', b'lex', b'amit', b'victor', b'jack', b'grin', b'NH')
        ), {
            'CEO': b'jeremy',
            'CTO': b'grin',
            'Devs': [b'lex', b'amit', b'victor', b'jack'],
            'State': b'NH'
        })

    def test_push_data_many_separated(self):
        self.assertEqual(parse(
            (PUSH_MANY('Chiefs'), OP_HASH160, PUSH_MANY('Devs')),
            (b'jeremy', b'grin', OP_HASH160, b'lex', b'jack')
        ), {
            'Chiefs': [b'jeremy', b'grin'],
            'Devs': [b'lex', b'jack']
        })

    def test_push_data_many_not_separated(self):
        with self.assertRaisesRegexp(ParseError, 'consecutive PUSH_MANY'):
            parse((PUSH_MANY('Chiefs'), PUSH_MANY('Devs')), (b'jeremy', b'grin', b'lex', b'jack'))


class TestRedeemPubKeyHash(unittest.TestCase):

    def redeem_pubkey_hash(self, sig, pubkey):
        # this checks that factory function correctly sets up the script
        src1 = BaseInputScript.redeem_pubkey_hash(unhexlify(sig), unhexlify(pubkey))
        self.assertEqual(src1.template.name, 'pubkey_hash')
        self.assertEqual(hexlify(src1.values['signature']), sig)
        self.assertEqual(hexlify(src1.values['pubkey']), pubkey)
        # now we test that it will round trip
        src2 = BaseInputScript(src1.source)
        self.assertEqual(src2.template.name, 'pubkey_hash')
        self.assertEqual(hexlify(src2.values['signature']), sig)
        self.assertEqual(hexlify(src2.values['pubkey']), pubkey)
        return hexlify(src1.source)

    def test_redeem_pubkey_hash_1(self):
        self.assertEqual(
            self.redeem_pubkey_hash(
                b'30450221009dc93f25184a8d483745cd3eceff49727a317c9bfd8be8d3d04517e9cdaf8dd502200e'
                b'02dc5939cad9562d2b1f303f185957581c4851c98d497af281118825e18a8301',
                b'025415a06514230521bff3aaface31f6db9d9bbc39bf1ca60a189e78731cfd4e1b'
            ),
            b'4830450221009dc93f25184a8d483745cd3eceff49727a317c9bfd8be8d3d04517e9cdaf8dd502200e02d'
            b'c5939cad9562d2b1f303f185957581c4851c98d497af281118825e18a830121025415a06514230521bff3'
            b'aaface31f6db9d9bbc39bf1ca60a189e78731cfd4e1b'
        )


class TestRedeemScriptHash(unittest.TestCase):

    def redeem_script_hash(self, sigs, pubkeys):
        # this checks that factory function correctly sets up the script
        src1 = BaseInputScript.redeem_script_hash(
            [unhexlify(sig) for sig in sigs],
            [unhexlify(pubkey) for pubkey in pubkeys]
        )
        subscript1 = src1.values['script']
        self.assertEqual(src1.template.name, 'script_hash')
        self.assertEqual([hexlify(v) for v in src1.values['signatures']], sigs)
        self.assertEqual([hexlify(p) for p in subscript1.values['pubkeys']], pubkeys)
        self.assertEqual(subscript1.values['signatures_count'], len(sigs))
        self.assertEqual(subscript1.values['pubkeys_count'], len(pubkeys))
        # now we test that it will round trip
        src2 = BaseInputScript(src1.source)
        subscript2 = src2.values['script']
        self.assertEqual(src2.template.name, 'script_hash')
        self.assertEqual([hexlify(v) for v in src2.values['signatures']], sigs)
        self.assertEqual([hexlify(p) for p in subscript2.values['pubkeys']], pubkeys)
        self.assertEqual(subscript2.values['signatures_count'], len(sigs))
        self.assertEqual(subscript2.values['pubkeys_count'], len(pubkeys))
        return hexlify(src1.source)

    def test_redeem_script_hash_1(self):
        self.assertEqual(
            self.redeem_script_hash([
                b'3045022100fec82ed82687874f2a29cbdc8334e114af645c45298e85bb1efe69fcf15c617a0220575'
                b'e40399f9ada388d8e522899f4ec3b7256896dd9b02742f6567d960b613f0401',
                b'3044022024890462f731bd1a42a4716797bad94761fc4112e359117e591c07b8520ea33b02201ac68'
                b'9e35c4648e6beff1d42490207ba14027a638a62663b2ee40153299141eb01',
                b'30450221009910823e0142967a73c2d16c1560054d71c0625a385904ba2f1f53e0bc1daa8d02205cd'
                b'70a89c6cf031a8b07d1d5eb0d65d108c4d49c2d403f84fb03ad3dc318777a01'
            ], [
                b'0372ba1fd35e5f1b1437cba0c4ebfc4025b7349366f9f9c7c8c4b03a47bd3f68a4',
                b'03061d250182b2db1ba144167fd8b0ef3fe0fc3a2fa046958f835ffaf0dfdb7692',
                b'02463bfbc1eaec74b5c21c09239ae18dbf6fc07833917df10d0b43e322810cee0c',
                b'02fa6a6455c26fb516cfa85ea8de81dd623a893ffd579ee2a00deb6cdf3633d6bb',
                b'0382910eae483ce4213d79d107bfc78f3d77e2a31ea597be45256171ad0abeaa89'
            ]),
            b'00483045022100fec82ed82687874f2a29cbdc8334e114af645c45298e85bb1efe69fcf15c617a0220575e'
            b'40399f9ada388d8e522899f4ec3b7256896dd9b02742f6567d960b613f0401473044022024890462f731bd'
            b'1a42a4716797bad94761fc4112e359117e591c07b8520ea33b02201ac689e35c4648e6beff1d42490207ba'
            b'14027a638a62663b2ee40153299141eb014830450221009910823e0142967a73c2d16c1560054d71c0625a'
            b'385904ba2f1f53e0bc1daa8d02205cd70a89c6cf031a8b07d1d5eb0d65d108c4d49c2d403f84fb03ad3dc3'
            b'18777a014cad53210372ba1fd35e5f1b1437cba0c4ebfc4025b7349366f9f9c7c8c4b03a47bd3f68a42103'
            b'061d250182b2db1ba144167fd8b0ef3fe0fc3a2fa046958f835ffaf0dfdb76922102463bfbc1eaec74b5c2'
            b'1c09239ae18dbf6fc07833917df10d0b43e322810cee0c2102fa6a6455c26fb516cfa85ea8de81dd623a89'
            b'3ffd579ee2a00deb6cdf3633d6bb210382910eae483ce4213d79d107bfc78f3d77e2a31ea597be45256171'
            b'ad0abeaa8955ae'
        )


class TestPayPubKeyHash(unittest.TestCase):

    def pay_pubkey_hash(self, pubkey_hash):
        # this checks that factory function correctly sets up the script
        src1 = BaseOutputScript.pay_pubkey_hash(unhexlify(pubkey_hash))
        self.assertEqual(src1.template.name, 'pay_pubkey_hash')
        self.assertEqual(hexlify(src1.values['pubkey_hash']), pubkey_hash)
        # now we test that it will round trip
        src2 = BaseOutputScript(src1.source)
        self.assertEqual(src2.template.name, 'pay_pubkey_hash')
        self.assertEqual(hexlify(src2.values['pubkey_hash']), pubkey_hash)
        return hexlify(src1.source)

    def test_pay_pubkey_hash_1(self):
        self.assertEqual(
            self.pay_pubkey_hash(b'64d74d12acc93ba1ad495e8d2d0523252d664f4d'),
            b'76a91464d74d12acc93ba1ad495e8d2d0523252d664f4d88ac'
        )


class TestPayScriptHash(unittest.TestCase):

    def pay_script_hash(self, script_hash):
        # this checks that factory function correctly sets up the script
        src1 = BaseOutputScript.pay_script_hash(unhexlify(script_hash))
        self.assertEqual(src1.template.name, 'pay_script_hash')
        self.assertEqual(hexlify(src1.values['script_hash']), script_hash)
        # now we test that it will round trip
        src2 = BaseOutputScript(src1.source)
        self.assertEqual(src2.template.name, 'pay_script_hash')
        self.assertEqual(hexlify(src2.values['script_hash']), script_hash)
        return hexlify(src1.source)

    def test_pay_pubkey_hash_1(self):
        self.assertEqual(
            self.pay_script_hash(b'63d65a2ee8c44426d06050cfd71c0f0ff3fc41ac'),
            b'a91463d65a2ee8c44426d06050cfd71c0f0ff3fc41ac87'
        )
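The push_data helper these tests feed into BCDataStream follows the standard Bitcoin push-opcode rules: short pushes encode the length as the opcode itself, longer pushes use OP_PUSHDATA1/2/4. A hedged standalone sketch of that encoding (torba's actual helper may return the chunks separately rather than joined):

import struct
from binascii import unhexlify

def push_data(data):
    size = len(data)
    if size < 0x4c:                                     # short push: the opcode is the length
        return bytes(bytearray([size])) + data
    elif size <= 0xff:                                  # OP_PUSHDATA1
        return b'\x4c' + bytes(bytearray([size])) + data
    elif size <= 0xffff:                                # OP_PUSHDATA2, little-endian length
        return b'\x4d' + struct.pack('<H', size) + data
    return b'\x4e' + struct.pack('<I', size) + data     # OP_PUSHDATA4

# The 0x14 (= 20) byte visible in the expected script b'76a914...88ac' above is exactly this
# short-push length prefix for the 20-byte pubkey hash:
assert push_data(unhexlify(b'64d74d12acc93ba1ad495e8d2d0523252d664f4d'))[:1] == b'\x14'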
tests/unit/test_transaction.py (new file, 174 lines)
@@ -0,0 +1,174 @@
from binascii import hexlify, unhexlify
from twisted.trial import unittest

from torba.account import Account
from torba.coin.btc import BTC, Transaction, Output, Input
from torba.constants import CENT, COIN
from torba.manager import WalletManager
from torba.wallet import Wallet


NULL_HASH = b'\x00'*32
FEE_PER_BYTE = 50
FEE_PER_CHAR = 200000


def get_output(amount=CENT, pubkey_hash=NULL_HASH):
    return Transaction() \
        .add_outputs([Output.pay_pubkey_hash(amount, pubkey_hash)]) \
        .outputs[0]


def get_input():
    return Input.spend(get_output())


def get_transaction(txo=None):
    return Transaction() \
        .add_inputs([get_input()]) \
        .add_outputs([txo or Output.pay_pubkey_hash(CENT, NULL_HASH)])


def get_wallet_and_coin():
    ledger = WalletManager().get_or_create_ledger(BTC.get_id())
    coin = BTC(ledger)
    return Wallet('Main', [coin], [Account.generate(coin, u'torba')]), coin


class TestSizeAndFeeEstimation(unittest.TestCase):

    def setUp(self):
        self.wallet, self.coin = get_wallet_and_coin()

    def io_fee(self, io):
        return self.coin.get_input_output_fee(io)

    def test_output_size_and_fee(self):
        txo = get_output()
        self.assertEqual(txo.size, 46)
        self.assertEqual(self.io_fee(txo), 46 * FEE_PER_BYTE)

    def test_input_size_and_fee(self):
        txi = get_input()
        self.assertEqual(txi.size, 148)
        self.assertEqual(self.io_fee(txi), 148 * FEE_PER_BYTE)

    def test_transaction_size_and_fee(self):
        tx = get_transaction()
        base_size = tx.size - 1 - tx.inputs[0].size
        self.assertEqual(tx.size, 204)
        self.assertEqual(tx.base_size, base_size)
        self.assertEqual(self.coin.get_transaction_base_fee(tx), FEE_PER_BYTE * base_size)


class TestTransactionSerialization(unittest.TestCase):

    def test_genesis_transaction(self):
        raw = unhexlify(
            '01000000010000000000000000000000000000000000000000000000000000000000000000ffffffff4d04'
            'ffff001d0104455468652054696d65732030332f4a616e2f32303039204368616e63656c6c6f72206f6e20'
            '6272696e6b206f66207365636f6e64206261696c6f757420666f722062616e6b73ffffffff0100f2052a01'
            '000000434104678afdb0fe5548271967f1a67130b7105cd6a828e03909a67962e0ea1f61deb649f6bc3f4c'
            'ef38c4f35504e51ec112de5c384df7ba0b8d578a4c702b6bf11d5fac00000000'
        )
        tx = Transaction(raw)
        self.assertEqual(tx.version, 1)
        self.assertEqual(tx.locktime, 0)
        self.assertEqual(len(tx.inputs), 1)
        self.assertEqual(len(tx.outputs), 1)

        coinbase = tx.inputs[0]
        self.assertEqual(coinbase.output_txid, NULL_HASH)
        self.assertEqual(coinbase.output_index, 0xFFFFFFFF)
        self.assertEqual(coinbase.sequence, 4294967295)
        self.assertTrue(coinbase.is_coinbase)
        self.assertEqual(coinbase.script, None)
        self.assertEqual(
            coinbase.coinbase[8:],
            b'The Times 03/Jan/2009 Chancellor on brink of second bailout for banks'
        )

        out = tx.outputs[0]
        self.assertEqual(out.amount, 5000000000)
        self.assertEqual(out.index, 0)
        self.assertTrue(out.script.is_pay_pubkey)
        self.assertFalse(out.script.is_pay_pubkey_hash)
        self.assertFalse(out.script.is_pay_script_hash)

        tx._reset()
        self.assertEqual(tx.raw, raw)

    def test_coinbase_transaction(self):
        raw = unhexlify(
            '01000000010000000000000000000000000000000000000000000000000000000000000000ffffffff4e03'
            '1f5a070473319e592f4254432e434f4d2f4e59412ffabe6d6dcceb2a9d0444c51cabc4ee97a1a000036ca0'
            'cb48d25b94b78c8367d8b868454b0100000000000000c0309b21000008c5f8f80000ffffffff0291920b5d'
            '0000000017a914e083685a1097ce1ea9e91987ab9e94eae33d8a13870000000000000000266a24aa21a9ed'
            'e6c99265a6b9e1d36c962fda0516b35709c49dc3b8176fa7e5d5f1f6197884b400000000'
        )
        tx = Transaction(raw)
        self.assertEqual(tx.version, 1)
        self.assertEqual(tx.locktime, 0)
        self.assertEqual(len(tx.inputs), 1)
        self.assertEqual(len(tx.outputs), 2)

        coinbase = tx.inputs[0]
        self.assertEqual(coinbase.output_txid, NULL_HASH)
        self.assertEqual(coinbase.output_index, 0xFFFFFFFF)
        self.assertEqual(coinbase.sequence, 4294967295)
        self.assertTrue(coinbase.is_coinbase)
        self.assertEqual(coinbase.script, None)
        self.assertEqual(
            coinbase.coinbase[9:22],
            b'/BTC.COM/NYA/'
        )

        out = tx.outputs[0]
        self.assertEqual(out.amount, 1561039505)
        self.assertEqual(out.index, 0)
        self.assertFalse(out.script.is_pay_pubkey)
        self.assertFalse(out.script.is_pay_pubkey_hash)
        self.assertTrue(out.script.is_pay_script_hash)
        self.assertFalse(out.script.is_return_data)

        out1 = tx.outputs[1]
        self.assertEqual(out1.amount, 0)
        self.assertEqual(out1.index, 1)
        self.assertEqual(
            hexlify(out1.script.values['data']),
            b'aa21a9ede6c99265a6b9e1d36c962fda0516b35709c49dc3b8176fa7e5d5f1f6197884b4'
        )
        self.assertTrue(out1.script.is_return_data)
        self.assertFalse(out1.script.is_pay_pubkey)
        self.assertFalse(out1.script.is_pay_pubkey_hash)
        self.assertFalse(out1.script.is_pay_script_hash)

        tx._reset()
        self.assertEqual(tx.raw, raw)


class TestTransactionSigning(unittest.TestCase):

    def test_sign(self):
        ledger = WalletManager().get_or_create_ledger(BTC.get_id())
        coin = BTC(ledger)
        wallet = Wallet('Main', [coin], [Account.from_seed(
            coin, u'carbon smart garage balance margin twelve chest sword toast envelope bottom stom'
            u'ach absent', u'torba'
        )])
        account = wallet.default_account

        address1 = account.receiving_keys.generate_next_address()
        address2 = account.receiving_keys.generate_next_address()
        pubkey_hash1 = account.coin.address_to_hash160(address1)
        pubkey_hash2 = account.coin.address_to_hash160(address2)

        tx = Transaction() \
            .add_inputs([Input.spend(get_output(2*COIN, pubkey_hash1))]) \
            .add_outputs([Output.pay_pubkey_hash(int(1.9*COIN), pubkey_hash2)]) \
            .sign(account)

        self.assertEqual(
            hexlify(tx.inputs[0].script.values['signature']),
            b'304402203d463519290d06891e461ea5256c56097ccdad53379b1bb4e51ec5abc6e9fd02022034ed15b9d7c678716c4aa7c0fd26c688e8f9db8075838f2839ab55d551b62c0a01'
        )
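The size and fee assertions above reduce to simple arithmetic, since both fee methods multiply a byte size by the coin's fee_per_byte (50 for BTC here):

# Worked numbers behind TestSizeAndFeeEstimation:
output_fee = 50 * 46          # 2300: fee for the 46-byte pay-to-pubkey-hash output
input_fee = 50 * 148          # 7400: fee for the 148-byte spending input
base_size = 204 - 1 - 148     # 55: whole transaction minus one byte and the input, as the test computes
base_fee = 50 * 55            # 2750: what get_transaction_base_fee returns for this transaction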
tests/unit/test_wallet.py (new file, 97 lines)
@@ -0,0 +1,97 @@
from twisted.trial import unittest

from torba.coin.btc import BTC
from torba.manager import WalletManager
from torba.wallet import Account, Wallet, WalletStorage

from .ftc import FTC


class TestWalletCreation(unittest.TestCase):

    def setUp(self):
        self.manager = WalletManager()
        self.btc_ledger = self.manager.get_or_create_ledger(BTC.get_id())
        self.ftc_ledger = self.manager.get_or_create_ledger(FTC.get_id())

    def test_create_wallet_and_accounts(self):
        wallet = Wallet()
        self.assertEqual(wallet.name, 'Wallet')
        self.assertEqual(wallet.coins, [])
        self.assertEqual(wallet.accounts, [])

        account1 = wallet.generate_account(self.btc_ledger)
        account2 = wallet.generate_account(self.btc_ledger)
        account3 = wallet.generate_account(self.ftc_ledger)
        self.assertEqual(wallet.default_account, account1)
        self.assertEqual(len(wallet.coins), 2)
        self.assertEqual(len(wallet.accounts), 3)
        self.assertIsInstance(wallet.coins[0], BTC)
        self.assertIsInstance(wallet.coins[1], FTC)

        self.assertEqual(len(account1.receiving_keys.addresses), 0)
        self.assertEqual(len(account1.change_keys.addresses), 0)
        self.assertEqual(len(account2.receiving_keys.addresses), 0)
        self.assertEqual(len(account2.change_keys.addresses), 0)
        self.assertEqual(len(account3.receiving_keys.addresses), 0)
        self.assertEqual(len(account3.change_keys.addresses), 0)
        account1.ensure_enough_addresses()
        account2.ensure_enough_addresses()
        account3.ensure_enough_addresses()
        self.assertEqual(len(account1.receiving_keys.addresses), 20)
        self.assertEqual(len(account1.change_keys.addresses), 6)
        self.assertEqual(len(account2.receiving_keys.addresses), 20)
        self.assertEqual(len(account2.change_keys.addresses), 6)
        self.assertEqual(len(account3.receiving_keys.addresses), 20)
        self.assertEqual(len(account3.change_keys.addresses), 6)

    def test_load_and_save_wallet(self):
        wallet_dict = {
            'name': 'Main Wallet',
            'accounts': [
                {
                    'coin': 'btc_mainnet',
                    'seed':
                        "carbon smart garage balance margin twelve chest sword toast envelope bottom stomac"
                        "h absent",
                    'encrypted': False,
                    'private_key':
                        'xprv9s21ZrQH143K2dyhK7SevfRG72bYDRNv25yKPWWm6dqApNxm1Zb1m5gGcBWYfbsPjTr2v5joit8Af2Zp5P'
                        '6yz3jMbycrLrRMpeAJxR8qDg8',
                    'public_key':
                        'xpub661MyMwAqRbcF84AR8yfHoMzf4S2ct6mPJtvBtvNeyN9hBHuZ6uGJszkTSn5fQUCdz3XU17eBzFeAUwV6f'
                        'iW44g14WF52fYC5J483wqQ5ZP',
                    'receiving_gap': 10,
                    'receiving_keys': [
                        '0222345947a59dca4a3363ffa81ac87dd907d2b2feff57383eaeddbab266ca5f2d',
                        '03fdc9826d5d00a484188cba8eb7dba5877c0323acb77905b7bcbbab35d94be9f6'
                    ],
                    'change_gap': 10,
                    'change_keys': [
                        '038836be4147836ed6b4df6a89e0d9f1b1c11cec529b7ff5407de57f2e5b032c83'
                    ]
                }
            ]
        }

        storage = WalletStorage(default=wallet_dict)
        wallet = Wallet.from_storage(storage, self.manager)
        self.assertEqual(wallet.name, 'Main Wallet')
        self.assertEqual(len(wallet.coins), 1)
        self.assertIsInstance(wallet.coins[0], BTC)
        self.assertEqual(len(wallet.accounts), 1)
        account = wallet.default_account
        self.assertIsInstance(account, Account)

        self.assertEqual(len(account.receiving_keys.addresses), 2)
        self.assertEqual(
            account.receiving_keys.addresses[0],
            '1PmX9T3sCiDysNtWszJa44SkKcpGc2NaXP'
        )
        self.assertEqual(len(account.change_keys.addresses), 1)
        self.assertEqual(
            account.change_keys.addresses[0],
            '1PUbu1D1f3c244JPRSJKBCxRqui5NT6geR'
        )
        wallet_dict['coins'] = {'btc_mainnet': {'fee_per_byte': 50}}
        self.assertDictEqual(wallet_dict, wallet.to_dict())
torba/__init__.py (new file, 2 lines)
@@ -0,0 +1,2 @@
__path__ = __import__('pkgutil').extend_path(__path__, __name__)
__version__ = '0.0.1'
torba/account.py (new file, 190 lines)
@@ -0,0 +1,190 @@
import itertools
from typing import Dict, Generator
from binascii import hexlify, unhexlify

from torba.basecoin import BaseCoin
from torba.mnemonic import Mnemonic
from torba.bip32 import PrivateKey, PubKey, from_extended_key_string
from torba.hash import double_sha256, aes_encrypt, aes_decrypt


class KeyChain:

    def __init__(self, parent_key, child_keys, gap):
        self.coin = parent_key.coin
        self.parent_key = parent_key  # type: PubKey
        self.child_keys = child_keys
        self.minimum_gap = gap
        self.addresses = [
            self.coin.public_key_to_address(key)
            for key in child_keys
        ]

    @property
    def has_gap(self):
        if len(self.addresses) < self.minimum_gap:
            return False
        for address in self.addresses[-self.minimum_gap:]:
            if self.coin.ledger.is_address_old(address):
                return False
        return True

    def generate_next_address(self):
        child_key = self.parent_key.child(len(self.child_keys))
        self.child_keys.append(child_key.pubkey_bytes)
        self.addresses.append(child_key.address)
        return child_key.address

    def ensure_enough_addresses(self):
        starting_length = len(self.addresses)
        while not self.has_gap:
            self.generate_next_address()
        return self.addresses[starting_length:]


class Account:

    def __init__(self, coin, seed, encrypted, private_key, public_key,
                 receiving_keys=None, receiving_gap=20,
                 change_keys=None, change_gap=6):
        self.coin = coin  # type: BaseCoin
        self.seed = seed  # type: str
        self.encrypted = encrypted  # type: bool
        self.private_key = private_key  # type: PrivateKey
        self.public_key = public_key  # type: PubKey
        self.keychains = (
            KeyChain(public_key.child(0), receiving_keys or [], receiving_gap),
            KeyChain(public_key.child(1), change_keys or [], change_gap)
        )
        self.receiving_keys, self.change_keys = self.keychains

    @classmethod
    def generate(cls, coin, password):  # type: (BaseCoin, unicode) -> Account
        seed = Mnemonic().make_seed()
        return cls.from_seed(coin, seed, password)

    @classmethod
    def from_seed(cls, coin, seed, password):  # type: (BaseCoin, unicode, unicode) -> Account
        private_key = cls.get_private_key_from_seed(coin, seed, password)
        return cls(
            coin=coin, seed=seed, encrypted=False,
            private_key=private_key,
            public_key=private_key.public_key
        )

    @staticmethod
    def get_private_key_from_seed(coin, seed, password):  # type: (BaseCoin, unicode, unicode) -> PrivateKey
        return PrivateKey.from_seed(coin, Mnemonic.mnemonic_to_seed(seed, password))

    @classmethod
    def from_dict(cls, coin, d):  # type: (BaseCoin, Dict) -> Account
        if not d['encrypted']:
            private_key = from_extended_key_string(coin, d['private_key'])
            public_key = private_key.public_key
        else:
            private_key = d['private_key']
            public_key = from_extended_key_string(coin, d['public_key'])
        return cls(
            coin=coin,
            seed=d['seed'],
            encrypted=d['encrypted'],
            private_key=private_key,
            public_key=public_key,
            receiving_keys=[unhexlify(k) for k in d['receiving_keys']],
            receiving_gap=d['receiving_gap'],
            change_keys=[unhexlify(k) for k in d['change_keys']],
            change_gap=d['change_gap']
        )

    def to_dict(self):
        return {
            'coin': self.coin.get_id(),
            'seed': self.seed,
            'encrypted': self.encrypted,
            'private_key': self.private_key if self.encrypted else
                           self.private_key.extended_key_string(),
            'public_key': self.public_key.extended_key_string(),
            'receiving_keys': [hexlify(k).decode('iso-8859-1') for k in self.receiving_keys.child_keys],
            'receiving_gap': self.receiving_keys.minimum_gap,
            'change_keys': [hexlify(k).decode('iso-8859-1') for k in self.change_keys.child_keys],
            'change_gap': self.change_keys.minimum_gap
        }

    def decrypt(self, password):
        assert self.encrypted, "Key is not encrypted."
        secret = double_sha256(password)
        self.seed = aes_decrypt(secret, self.seed)
        self.private_key = from_extended_key_string(self.coin, aes_decrypt(secret, self.private_key))
        self.encrypted = False

    def encrypt(self, password):
        assert not self.encrypted, "Key is already encrypted."
        secret = double_sha256(password)
        self.seed = aes_encrypt(secret, self.seed)
        self.private_key = aes_encrypt(secret, self.private_key.extended_key_string())
        self.encrypted = True

    @property
    def addresses(self):
        return itertools.chain(self.receiving_keys.addresses, self.change_keys.addresses)

    def get_private_key_for_address(self, address):
        assert not self.encrypted, "Cannot get private key on encrypted wallet account."
        for a, keychain in enumerate(self.keychains):
            for b, match in enumerate(keychain.addresses):
                if address == match:
                    return self.private_key.child(a).child(b)

    def ensure_enough_addresses(self):
        return [
            address
            for keychain in self.keychains
            for address in keychain.ensure_enough_addresses()
        ]

    def addresses_without_history(self):
        for address in self.addresses:
            if not self.coin.ledger.has_address(address):
                yield address

    def get_least_used_receiving_address(self, max_transactions=1000):
        return self._get_least_used_address(
            self.receiving_keys.addresses,
            self.receiving_keys,
            max_transactions
        )

    def get_least_used_change_address(self, max_transactions=100):
        return self._get_least_used_address(
            self.change_keys.addresses,
            self.change_keys,
            max_transactions
        )

    def _get_least_used_address(self, addresses, keychain, max_transactions):
        ledger = self.coin.ledger
        address = ledger.get_least_used_address(addresses, max_transactions)
        if address:
            return address
        address = keychain.generate_next_address()
        ledger.subscribe_history(address)
        return address

    def get_unspent_utxos(self):
        return [
            utxo
            for address in self.addresses
            for utxo in self.coin.ledger.get_unspent_outputs(address)
        ]

    def get_balance(self):
        return sum(utxo.amount for utxo in self.get_unspent_utxos())


class AccountsView:

    def __init__(self, accounts):
        self._accounts_generator = accounts

    def __iter__(self):  # type: () -> Generator[Account]
        return self._accounts_generator()
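Each Account therefore holds two KeyChains derived from the account public key: child 0 for receiving addresses and child 1 for change, with get_private_key_for_address walking the same two-level path on the private side. A minimal sketch of that layout, mirroring the unit tests (the exact PubKey/PrivateKey API lives in torba.bip32, which is not part of this excerpt, so treat the final assertion as an assumption):

from torba.account import Account
from torba.coin.btc import BTC
from torba.manager import WalletManager

coin = BTC(WalletManager().get_or_create_ledger(BTC.get_id()))
account = Account.generate(coin, u'torba')

receiving_parent = account.public_key.child(0)   # parent key of account.receiving_keys
change_parent = account.public_key.child(1)      # parent key of account.change_keys

address = account.receiving_keys.generate_next_address()    # derived at path 0/0
private_key = account.get_private_key_for_address(address)  # private_key.child(0).child(0)
assert private_key.public_key.address == address            # assumed round trip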
torba/basecoin.py (new file, 79 lines)
@@ -0,0 +1,79 @@
import six
from typing import Dict, Type
from torba.hash import hash160, double_sha256, Base58


class CoinRegistry(type):
    coins = {}  # type: Dict[str, Type[BaseCoin]]

    def __new__(mcs, name, bases, attrs):
        cls = super(CoinRegistry, mcs).__new__(mcs, name, bases, attrs)  # type: Type[BaseCoin]
        if not (name == 'BaseCoin' and not bases):
            coin_id = cls.get_id()
            assert coin_id not in mcs.coins, 'Coin with id "{}" already registered.'.format(coin_id)
            mcs.coins[coin_id] = cls
            assert cls.ledger_class.coin_class is None, (
                "Ledger ({}) which this coin ({}) references is already referenced by another "
                "coin ({}). One to one relationship between a coin and a ledger is strictly and "
                "automatically enforced. Make sure that coin_class=None in the ledger and that "
                "another Coin isn't already referencing this Ledger."
            ).format(cls.ledger_class.__name__, name, cls.ledger_class.coin_class.__name__)
            # create back reference from ledger to the coin
            cls.ledger_class.coin_class = cls
        return cls

    @classmethod
    def get_coin_class(mcs, coin_id):  # type: (str) -> Type[BaseCoin]
        return mcs.coins[coin_id]


class BaseCoin(six.with_metaclass(CoinRegistry)):

    name = None
    symbol = None
    network = None

    ledger_class = None  # type: Type[BaseLedger]
    transaction_class = None  # type: Type[BaseTransaction]

    secret_prefix = None
    pubkey_address_prefix = None
    script_address_prefix = None
    extended_public_key_prefix = None
    extended_private_key_prefix = None

    def __init__(self, ledger, fee_per_byte):
        self.ledger = ledger
        self.fee_per_byte = fee_per_byte

    @classmethod
    def get_id(cls):
        return '{}_{}'.format(cls.symbol.lower(), cls.network.lower())

    def to_dict(self):
        return {'fee_per_byte': self.fee_per_byte}

    def get_input_output_fee(self, io):
        """ Fee based on size of the input / output. """
        return self.fee_per_byte * io.size

    def get_transaction_base_fee(self, tx):
        """ Fee for the transaction header and all outputs; without inputs. """
        return self.fee_per_byte * tx.base_size

    def hash160_to_address(self, h160):
        raw_address = self.pubkey_address_prefix + h160
        return Base58.encode(bytearray(raw_address + double_sha256(raw_address)[0:4]))

    @staticmethod
    def address_to_hash160(address):
        bytes = Base58.decode(address)
        prefix, pubkey_bytes, addr_checksum = bytes[0], bytes[1:21], bytes[21:]
        return pubkey_bytes

    def public_key_to_address(self, public_key):
        return self.hash160_to_address(hash160(public_key))

    @staticmethod
    def private_key_to_wif(private_key):
        return b'\x1c' + private_key + b'\x01'
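hash160_to_address is standard Base58Check encoding: prefix byte, 20-byte hash160, then the first four bytes of a double SHA-256 checksum, all rendered in the Base58 alphabet. A hedged Python 3 sketch of the same construction using only hashlib (torba's own Base58 and double_sha256 helpers live in torba.hash, which is not shown in this commit):

import hashlib

B58_ALPHABET = '123456789ABCDEFGHJKLMNPQRSTUVWXYZabcdefghijkmnopqrstuvwxyz'

def base58check_address(prefix, h160):
    # prefix: single byte, e.g. b'\x00' for BTC mainnet P2PKH; h160: 20-byte hash160
    payload = prefix + h160
    checksum = hashlib.sha256(hashlib.sha256(payload).digest()).digest()[:4]
    data = payload + checksum
    num = int.from_bytes(data, 'big')
    encoded = ''
    while num > 0:
        num, rem = divmod(num, 58)
        encoded = B58_ALPHABET[rem] + encoded
    # each leading zero byte becomes a leading '1' in Base58
    pad = len(data) - len(data.lstrip(b'\x00'))
    return '1' * pad + encoded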
469
torba/baseledger.py
Normal file
469
torba/baseledger.py
Normal file
|
@ -0,0 +1,469 @@
|
|||
import os
|
||||
import hashlib
|
||||
from binascii import hexlify, unhexlify
|
||||
from typing import List, Dict, Type
|
||||
from operator import itemgetter
|
||||
|
||||
from twisted.internet import threads, defer, task, reactor
|
||||
|
||||
from torba.account import Account, AccountsView
|
||||
from torba.basecoin import BaseCoin
|
||||
from torba.basetransaction import BaseTransaction
|
||||
from torba.basenetwork import BaseNetwork
|
||||
from torba.stream import StreamController, execute_serially
|
||||
from torba.util import hex_to_int, int_to_hex, rev_hex, hash_encode
|
||||
from torba.hash import double_sha256, pow_hash
|
||||
|
||||
|
||||
class Address:
|
||||
|
||||
def __init__(self, pubkey_hash):
|
||||
self.pubkey_hash = pubkey_hash
|
||||
self.transactions = [] # type: List[BaseTransaction]
|
||||
|
||||
def __iter__(self):
|
||||
return iter(self.transactions)
|
||||
|
||||
def __len__(self):
|
||||
return len(self.transactions)
|
||||
|
||||
def add_transaction(self, transaction):
|
||||
self.transactions.append(transaction)
|
||||
|
||||
def get_unspent_utxos(self):
|
||||
inputs, outputs, utxos = [], [], []
|
||||
for tx in self:
|
||||
for txi in tx.inputs:
|
||||
inputs.append((txi.output_txid, txi.output_index))
|
||||
for txo in tx.outputs:
|
||||
if txo.script.is_pay_pubkey_hash and txo.script.values['pubkey_hash'] == self.pubkey_hash:
|
||||
outputs.append((txo, txo.transaction.hash, txo.index))
|
||||
for output in set(outputs):
|
||||
if output[1:] not in inputs:
|
||||
yield output[0]
|
||||
|
||||
|
||||
class BaseLedger:
|
||||
|
||||
# coin_class is automatically set by BaseCoin metaclass
|
||||
# when it creates the Coin classes, there is a 1..1 relationship
|
||||
# between a coin and a ledger (at the class level) but a 1..* relationship
|
||||
# at instance level. Only one Ledger instance should exist per coin class,
|
||||
# but many coin instances can exist linking back to the single Ledger instance.
|
||||
coin_class = None # type: Type[BaseCoin]
|
||||
network_class = None # type: Type[BaseNetwork]
|
||||
|
||||
verify_bits_to_target = True
|
||||
|
||||
def __init__(self, accounts, config=None, network=None, db=None):
|
||||
self.accounts = accounts # type: AccountsView
|
||||
self.config = config or {}
|
||||
self.db = db
|
||||
self.addresses = {} # type: Dict[str, Address]
|
||||
self.transactions = {} # type: Dict[str, BaseTransaction]
|
||||
self.headers = Headers(self)
|
||||
self._on_transaction_controller = StreamController()
|
||||
self.on_transaction = self._on_transaction_controller.stream
|
||||
self.network = network or self.network_class(self.config)
|
||||
self.network.on_header.listen(self.process_header)
|
||||
self.network.on_status.listen(self.process_status)
|
||||
|
||||
@property
|
||||
def transaction_class(self):
|
||||
return self.coin_class.transaction_class
|
||||
|
||||
@classmethod
|
||||
def from_json(cls, json_dict):
|
||||
return cls(json_dict)
|
||||
|
||||
@defer.inlineCallbacks
|
||||
def load(self):
|
||||
txs = yield self.db.get_transactions()
|
||||
for tx_hash, raw, height in txs:
|
||||
self.transactions[tx_hash] = self.transaction_class(raw, height)
|
||||
txios = yield self.db.get_transaction_inputs_and_outputs()
|
||||
for tx_hash, address_hash, input_output, amount, height in txios:
|
||||
tx = self.transactions[tx_hash]
|
||||
address = self.addresses.get(address_hash)
|
||||
if address is None:
|
||||
address = self.addresses[address_hash] = Address(self.coin_class.address_to_hash160(address_hash))
|
||||
tx.add_txio(address, input_output, amount)
|
||||
address.add_transaction(tx)
|
||||
|
||||
def is_address_old(self, address, age_limit=2):
|
||||
age = -1
|
||||
for tx in self.get_transactions(address, []):
|
||||
if tx.height == 0:
|
||||
tx_age = 0
|
||||
else:
|
||||
tx_age = self.headers.height - tx.height + 1
|
||||
if tx_age > age:
|
||||
age = tx_age
|
||||
return age > age_limit
|
||||
|
||||
def add_transaction(self, address, transaction): # type: (str, BaseTransaction) -> None
|
||||
if address not in self.addresses:
|
||||
self.addresses[address] = Address(self.coin_class.address_to_hash160(address))
|
||||
self.addresses[address].add_transaction(transaction)
|
||||
self.transactions.setdefault(hexlify(transaction.id), transaction)
|
||||
self._on_transaction_controller.add(transaction)
|
||||
|
||||
def has_address(self, address):
|
||||
return address in self.addresses
|
||||
|
||||
def get_transaction(self, tx_hash, *args):
|
||||
return self.transactions.get(tx_hash, *args)
|
||||
|
||||
def get_transactions(self, address, *args):
|
||||
return self.addresses.get(address, *args)
|
||||
|
||||
def get_status(self, address):
|
||||
hashes = [
|
||||
'{}:{}:'.format(hexlify(tx.hash), tx.height).encode()
|
||||
for tx in self.get_transactions(address, []) if tx.height is not None
|
||||
]
|
||||
if hashes:
|
||||
return hexlify(hashlib.sha256(b''.join(hashes)).digest())
|
||||
|
||||
def has_transaction(self, tx_hash):
|
||||
return tx_hash in self.transactions
|
||||
|
||||
def get_least_used_address(self, addresses, max_transactions=100):
|
||||
transaction_counts = []
|
||||
for address in addresses:
|
||||
transactions = self.get_transactions(address, [])
|
||||
tx_count = len(transactions)
|
||||
if tx_count == 0:
|
||||
return address
|
||||
elif tx_count >= max_transactions:
|
||||
continue
|
||||
else:
|
||||
transaction_counts.append((address, tx_count))
|
||||
if transaction_counts:
|
||||
transaction_counts.sort(key=itemgetter(1))
|
||||
return transaction_counts[0]
|
||||
|
||||
def get_unspent_outputs(self, address):
|
||||
if address in self.addresses:
|
||||
return list(self.addresses[address].get_unspent_utxos())
|
||||
return []
|
||||
|
||||
@defer.inlineCallbacks
|
||||
def start(self):
|
||||
first_connection = self.network.on_connected.first
|
||||
self.network.start()
|
||||
yield first_connection
|
||||
self.headers.touch()
|
||||
yield self.update_headers()
|
||||
yield self.network.subscribe_headers()
|
||||
yield self.update_accounts()
|
||||
|
||||
def stop(self):
|
||||
return self.network.stop()
|
||||
|
||||
@execute_serially
|
||||
@defer.inlineCallbacks
|
||||
def update_headers(self):
|
||||
while True:
|
||||
height_sought = len(self.headers)
|
||||
headers = yield self.network.get_headers(height_sought)
|
||||
print("received {} headers starting at {} height".format(headers['count'], height_sought))
|
||||
#log.info("received {} headers starting at {} height".format(headers['count'], height_sought))
|
||||
if headers['count'] <= 0:
|
||||
break
|
||||
yield self.headers.connect(height_sought, unhexlify(headers['hex']))
|
||||
|
||||
@defer.inlineCallbacks
|
||||
def process_header(self, response):
|
||||
header = response[0]
|
||||
if self.update_headers.is_running:
|
||||
return
|
||||
if header['height'] == len(self.headers):
|
||||
# New header from network directly connects after the last local header.
|
||||
yield self.headers.connect(len(self.headers), unhexlify(header['hex']))
|
||||
elif header['height'] > len(self.headers):
|
||||
# New header is several heights ahead of local, do download instead.
|
||||
yield self.update_headers()
|
||||
|
||||
@execute_serially
|
||||
def update_accounts(self):
|
||||
return defer.DeferredList([
|
||||
self.update_account(a) for a in self.accounts
|
||||
])
|
||||
|
||||
@defer.inlineCallbacks
|
||||
def update_account(self, account): # type: (Account) -> defer.Deferred
|
||||
# Before subscribing, download history for any addresses that don't have any,
|
||||
# this avoids situation where we're getting status updates to addresses we know
|
||||
# need to update anyways. Continue to get history and create more addresses until
|
||||
# all missing addresses are created and history for them is fully restored.
|
||||
account.ensure_enough_addresses()
|
||||
addresses = list(account.addresses_without_history())
|
||||
while addresses:
|
||||
yield defer.DeferredList([
|
||||
self.update_history(a) for a in addresses
|
||||
])
|
||||
addresses = account.ensure_enough_addresses()
|
||||
|
||||
# By this point all of the addresses should be restored and we
|
||||
# can now subscribe all of them to receive updates.
|
||||
yield defer.DeferredList([
|
||||
self.subscribe_history(address)
|
||||
for address in account.addresses
|
||||
])
|
||||
|
||||
@defer.inlineCallbacks
|
||||
def update_history(self, address):
|
||||
history = yield self.network.get_history(address)
|
||||
for hash in map(itemgetter('tx_hash'), history):
|
||||
transaction = self.get_transaction(hash)
|
||||
if not transaction:
|
||||
raw = yield self.network.get_transaction(hash)
|
||||
transaction = self.transaction_class(unhexlify(raw))
|
||||
self.add_transaction(address, transaction)
|
||||
|
||||
@defer.inlineCallbacks
|
||||
def subscribe_history(self, address):
|
||||
status = yield self.network.subscribe_address(address)
|
||||
if status != self.get_status(address):
|
||||
yield self.update_history(address)
|
||||
|
||||
def process_status(self, response):
|
||||
address, status = response
|
||||
if status != self.get_status(address):
|
||||
task.deferLater(reactor, 0, self.update_history, address)
|
||||
|
||||
def broadcast(self, tx):
|
||||
return self.network.broadcast(hexlify(tx.raw))
|
||||
|
||||
|
||||
class Headers:
|
||||
|
||||
def __init__(self, ledger):
|
||||
self.ledger = ledger
|
||||
self._size = None
|
||||
self._on_change_controller = StreamController()
|
||||
self.on_changed = self._on_change_controller.stream
|
||||
|
||||
@property
|
||||
def path(self):
|
||||
wallet_path = self.ledger.config.get('wallet_path', '')
|
||||
filename = '{}_headers'.format(self.ledger.coin_class.get_id())
|
||||
return os.path.join(wallet_path, filename)
|
||||
|
||||
def touch(self):
|
||||
if not os.path.exists(self.path):
|
||||
with open(self.path, 'wb'):
|
||||
pass
|
||||
|
||||
@property
|
||||
def height(self):
|
||||
return len(self) - 1
|
||||
|
||||
def sync_read_length(self):
|
||||
return os.path.getsize(self.path) // self.ledger.header_size
|
||||
|
||||
def sync_read_header(self, height):
|
||||
if 0 <= height < len(self):
|
||||
with open(self.path, 'rb') as f:
|
||||
f.seek(height * self.ledger.header_size)
|
||||
return f.read(self.ledger.header_size)
|
||||
|
||||
def __len__(self):
|
||||
if self._size is None:
|
||||
self._size = self.sync_read_length()
|
||||
return self._size
|
||||
|
||||
def __getitem__(self, height):
|
||||
assert not isinstance(height, slice),\
|
||||
"Slicing of header chain has not been implemented yet."
|
||||
header = self.sync_read_header(height)
|
||||
return self._deserialize(height, header)
|
||||
|
||||
@execute_serially
|
||||
@defer.inlineCallbacks
|
||||
def connect(self, start, headers):
|
||||
yield threads.deferToThread(self._sync_connect, start, headers)
|
||||
|
||||
def _sync_connect(self, start, headers):
|
||||
previous_header = None
|
||||
for header in self._iterate_headers(start, headers):
|
||||
height = header['block_height']
|
||||
if previous_header is None and height > 0:
|
||||
previous_header = self[height-1]
|
||||
self._verify_header(height, header, previous_header)
|
||||
previous_header = header
|
||||
|
||||
with open(self.path, 'r+b') as f:
|
||||
f.seek(start * self.ledger.header_size)
|
||||
f.write(headers)
|
||||
f.truncate()
|
||||
|
||||
_old_size = self._size
|
||||
self._size = self.sync_read_length()
|
||||
change = self._size - _old_size
|
||||
#log.info('saved {} header blocks'.format(change))
|
||||
self._on_change_controller.add(change)
|
||||
|
||||
def _iterate_headers(self, height, headers):
|
||||
assert len(headers) % self.ledger.header_size == 0
|
||||
for idx in range(len(headers) // self.ledger.header_size):
|
||||
start, end = idx * self.ledger.header_size, (idx + 1) * self.ledger.header_size
|
||||
header = headers[start:end]
|
||||
yield self._deserialize(height+idx, header)
|
||||
|
||||
def _verify_header(self, height, header, previous_header):
|
||||
previous_hash = self._hash_header(previous_header)
|
||||
assert previous_hash == header['prev_block_hash'], \
|
||||
"prev hash mismatch: {} vs {}".format(previous_hash, header['prev_block_hash'])
|
||||
|
||||
bits, target = self._calculate_lbry_next_work_required(height, previous_header, header)
|
||||
assert bits == header['bits'], \
|
||||
"bits mismatch: {} vs {} (hash: {})".format(
|
||||
bits, header['bits'], self._hash_header(header))
|
||||
|
||||
_pow_hash = self._pow_hash_header(header)
|
||||
assert int(b'0x' + _pow_hash, 16) <= target, \
|
||||
"insufficient proof of work: {} vs target {}".format(
|
||||
int(b'0x' + _pow_hash, 16), target)
|
||||
|
||||
@staticmethod
|
||||
def _serialize(header):
|
||||
return b''.join([
|
||||
int_to_hex(header['version'], 4),
|
||||
rev_hex(header['prev_block_hash']),
|
||||
rev_hex(header['merkle_root']),
|
||||
rev_hex(header['claim_trie_root']),
|
||||
int_to_hex(int(header['timestamp']), 4),
|
||||
int_to_hex(int(header['bits']), 4),
|
||||
int_to_hex(int(header['nonce']), 4)
|
||||
])
|
||||
|
||||
@staticmethod
|
||||
def _deserialize(height, header):
|
||||
return {
|
||||
'version': hex_to_int(header[0:4]),
|
||||
'prev_block_hash': hash_encode(header[4:36]),
|
||||
'merkle_root': hash_encode(header[36:68]),
|
||||
'claim_trie_root': hash_encode(header[68:100]),
|
||||
'timestamp': hex_to_int(header[100:104]),
|
||||
'bits': hex_to_int(header[104:108]),
|
||||
'nonce': hex_to_int(header[108:112]),
|
||||
'block_height': height
|
||||
}
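For reference, the 112-byte header layout implied by the slices above (a reading of this _deserialize only; the integer fields are presumably little-endian, matching hex_to_int):

# offset  size  field
#    0      4   version
#    4     32   prev_block_hash  (stored reversed, hence hash_encode)
#   36     32   merkle_root
#   68     32   claim_trie_root
#  100      4   timestamp
#  104      4   bits (compact difficulty target)
#  108      4   nonce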
|
||||
|
||||
def _hash_header(self, header):
|
||||
if header is None:
|
||||
return b'0' * 64
|
||||
return hash_encode(double_sha256(unhexlify(self._serialize(header))))
|
||||
|
||||
def _pow_hash_header(self, header):
|
||||
if header is None:
|
||||
return b'0' * 64
|
||||
return hash_encode(pow_hash(unhexlify(self._serialize(header))))
|
||||
|
||||
def _calculate_lbry_next_work_required(self, height, first, last):
|
||||
""" See: lbrycrd/src/lbry.cpp """
|
||||
|
||||
if height == 0:
|
||||
return self.ledger.genesis_bits, self.ledger.max_target
|
||||
|
||||
if self.ledger.verify_bits_to_target:
|
||||
bits = last['bits']
|
||||
bitsN = (bits >> 24) & 0xff
|
||||
assert 0x03 <= bitsN <= 0x1f, \
|
||||
"First part of bits should be in [0x03, 0x1d], but it was {}".format(hex(bitsN))
|
||||
bitsBase = bits & 0xffffff
|
||||
assert 0x8000 <= bitsBase <= 0x7fffff, \
|
||||
"Second part of bits should be in [0x8000, 0x7fffff] but it was {}".format(bitsBase)
|
||||
|
||||
# new target
|
||||
retargetTimespan = self.ledger.target_timespan
|
||||
nActualTimespan = last['timestamp'] - first['timestamp']
|
||||
|
||||
nModulatedTimespan = retargetTimespan + (nActualTimespan - retargetTimespan) // 8
|
||||
|
||||
nMinTimespan = retargetTimespan - (retargetTimespan // 8)
|
||||
nMaxTimespan = retargetTimespan + (retargetTimespan // 2)
|
||||
|
||||
# Limit adjustment step
|
||||
if nModulatedTimespan < nMinTimespan:
|
||||
nModulatedTimespan = nMinTimespan
|
||||
elif nModulatedTimespan > nMaxTimespan:
|
||||
nModulatedTimespan = nMaxTimespan
|
||||
|
||||
# Retarget
|
||||
bnPowLimit = _ArithUint256(self.ledger.max_target)
|
||||
bnNew = _ArithUint256.SetCompact(last['bits'])
|
||||
bnNew *= nModulatedTimespan
|
||||
bnNew //= retargetTimespan  # divide by the target timespan, per lbrycrd's lbry.cpp
|
||||
if bnNew > bnPowLimit:
|
||||
bnNew = bnPowLimit
|
||||
|
||||
return bnNew.GetCompact(), bnNew._value
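A small worked example of the timespan clamping above, with hypothetical numbers (assuming a 150-second target timespan; the real value comes from self.ledger.target_timespan):

retargetTimespan = 150                                                              # assumed target (seconds)
nActualTimespan = 30                                                                # blocks arrived much faster
nModulatedTimespan = retargetTimespan + (nActualTimespan - retargetTimespan) // 8   # 135
nMinTimespan = retargetTimespan - (retargetTimespan // 8)                           # 132
nMaxTimespan = retargetTimespan + (retargetTimespan // 2)                           # 225
# 135 already lies inside [132, 225], so no clamping happens and the new
# target shrinks slightly, i.e. difficulty goes up.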
|
||||
|
||||
|
||||
class _ArithUint256:
|
||||
""" See: lbrycrd/src/arith_uint256.cpp """
|
||||
|
||||
def __init__(self, value):
|
||||
self._value = value
|
||||
|
||||
def __str__(self):
|
||||
return hex(self._value)
|
||||
|
||||
@staticmethod
|
||||
def fromCompact(nCompact):
|
||||
"""Convert a compact representation into its value"""
|
||||
nSize = nCompact >> 24
|
||||
# the lower 23 bits
|
||||
nWord = nCompact & 0x007fffff
|
||||
if nSize <= 3:
|
||||
return nWord >> 8 * (3 - nSize)
|
||||
else:
|
||||
return nWord << 8 * (nSize - 3)
|
||||
|
||||
@classmethod
|
||||
def SetCompact(cls, nCompact):
|
||||
return cls(cls.fromCompact(nCompact))
|
||||
|
||||
def bits(self):
|
||||
"""Returns the position of the highest bit set plus one."""
|
||||
bn = bin(self._value)[2:]
|
||||
for i, d in enumerate(bn):
|
||||
if d:
|
||||
return (len(bn) - i) + 1
|
||||
return 0
|
||||
|
||||
def GetLow64(self):
|
||||
return self._value & 0xffffffffffffffff
|
||||
|
||||
def GetCompact(self):
|
||||
"""Convert a value into its compact representation"""
|
||||
nSize = (self.bits() + 7) // 8
|
||||
nCompact = 0
|
||||
if nSize <= 3:
|
||||
nCompact = self.GetLow64() << 8 * (3 - nSize)
|
||||
else:
|
||||
bn = _ArithUint256(self._value >> 8 * (nSize - 3))
|
||||
nCompact = bn.GetLow64()
|
||||
# The 0x00800000 bit denotes the sign.
|
||||
# Thus, if it is already set, divide the mantissa by 256 and increase the exponent.
|
||||
if nCompact & 0x00800000:
|
||||
nCompact >>= 8
|
||||
nSize += 1
|
||||
assert (nCompact & ~0x007fffff) == 0
|
||||
assert nSize < 256
|
||||
nCompact |= nSize << 24
|
||||
return nCompact
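A hedged round-trip sketch for the compact encoding helpers above, using the well-known Bitcoin genesis bits value 0x1d00ffff (an external constant, not taken from this file); meant to be run once the class is defined:

bits = 0x1d00ffff
target = _ArithUint256.fromCompact(bits)
assert target == 0xffff << (8 * (0x1d - 3))          # mantissa shifted by (size - 3) bytes
assert _ArithUint256(target).GetCompact() == bits    # encodes back to the same compact form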
|
||||
|
||||
def __mul__(self, x):
|
||||
# Take the mod because we are limited to an unsigned 256 bit number
|
||||
return _ArithUint256((self._value * x) % 2 ** 256)
|
||||
|
||||
def __ifloordiv__(self, x):
|
||||
self._value = (self._value // x)
|
||||
return self
|
||||
|
||||
def __gt__(self, x):
|
||||
return self._value > x._value
|
221
torba/basenetwork.py
Normal file
|
@ -0,0 +1,221 @@
|
|||
import six
|
||||
import json
|
||||
import socket
|
||||
import logging
|
||||
from itertools import cycle
|
||||
from twisted.internet import defer, reactor, protocol
|
||||
from twisted.application.internet import ClientService, CancelledError
|
||||
from twisted.internet.endpoints import clientFromString
|
||||
from twisted.protocols.basic import LineOnlyReceiver
|
||||
|
||||
from torba import __version__
|
||||
from torba.stream import StreamController
|
||||
|
||||
log = logging.getLogger()
|
||||
|
||||
|
||||
def unicode2bytes(string):
|
||||
if isinstance(string, six.text_type):
|
||||
return string.encode('iso-8859-1')
|
||||
elif isinstance(string, list):
|
||||
return [unicode2bytes(s) for s in string]
|
||||
return string
|
||||
|
||||
|
||||
def bytes2unicode(maybe_bytes):
|
||||
if isinstance(maybe_bytes, bytes):
|
||||
return maybe_bytes.decode()
|
||||
elif isinstance(maybe_bytes, list):
|
||||
return [bytes2unicode(b) for b in maybe_bytes]
|
||||
return maybe_bytes
|
||||
|
||||
|
||||
class StratumClientProtocol(LineOnlyReceiver):
|
||||
delimiter = b'\n'
|
||||
MAX_LENGTH = 100000
|
||||
|
||||
def __init__(self):
|
||||
self.request_id = 0
|
||||
self.lookup_table = {}
|
||||
self.session = {}
|
||||
|
||||
self.on_disconnected_controller = StreamController()
|
||||
self.on_disconnected = self.on_disconnected_controller.stream
|
||||
|
||||
def _get_id(self):
|
||||
self.request_id += 1
|
||||
return self.request_id
|
||||
|
||||
@property
|
||||
def _ip(self):
|
||||
return self.transport.getPeer().host
|
||||
|
||||
def get_session(self):
|
||||
return self.session
|
||||
|
||||
def connectionMade(self):
|
||||
try:
|
||||
self.transport.setTcpNoDelay(True)
|
||||
self.transport.setTcpKeepAlive(True)
|
||||
self.transport.socket.setsockopt(
|
||||
socket.SOL_TCP, socket.TCP_KEEPIDLE, 120
|
||||
# Seconds before sending keepalive probes
|
||||
)
|
||||
self.transport.socket.setsockopt(
|
||||
socket.SOL_TCP, socket.TCP_KEEPINTVL, 1
|
||||
# Interval in seconds between keepalive probes
|
||||
)
|
||||
self.transport.socket.setsockopt(
|
||||
socket.SOL_TCP, socket.TCP_KEEPCNT, 5
|
||||
# Failed keepalive probes before declaring other end dead
|
||||
)
|
||||
except Exception as err:
|
||||
# Supported only by the socket transport,
|
||||
# but there's really no better place in code to trigger this.
|
||||
log.warning("Error setting up socket: %s", err)
|
||||
|
||||
def connectionLost(self, reason=None):
|
||||
self.on_disconnected_controller.add(True)
|
||||
|
||||
def lineReceived(self, line):
|
||||
|
||||
try:
|
||||
# `line` comes in as a byte string but `json.loads` automatically converts everything to
|
||||
# unicode. For keys it's not a big deal but for values there is an expectation
|
||||
# everywhere else in wallet code that most values are byte strings.
|
||||
message = json.loads(
|
||||
line, object_hook=lambda obj: {
|
||||
k: unicode2bytes(v) for k, v in obj.items()
|
||||
}
|
||||
)
|
||||
except (ValueError, TypeError):
|
||||
raise ValueError("Cannot decode message '{}'".format(line.strip()))
|
||||
|
||||
if message.get('id'):
|
||||
try:
|
||||
d = self.lookup_table.pop(message['id'])
|
||||
if message.get('error'):
|
||||
d.errback(RuntimeError(*message['error']))
|
||||
else:
|
||||
d.callback(message.get('result'))
|
||||
except KeyError:
|
||||
raise LookupError(
|
||||
"Lookup for deferred object for message ID '{}' failed.".format(message['id']))
|
||||
elif message.get('method') in self.network.subscription_controllers:
|
||||
controller = self.network.subscription_controllers[message['method']]
|
||||
controller.add(message.get('params'))
|
||||
else:
|
||||
log.warning("Cannot handle message '%s'" % line)
|
||||
|
||||
def rpc(self, method, *args):
|
||||
message_id = self._get_id()
|
||||
message = json.dumps({
|
||||
'id': message_id,
|
||||
'method': method,
|
||||
'params': [bytes2unicode(arg) for arg in args]
|
||||
})
|
||||
self.sendLine(message.encode('latin-1'))
|
||||
d = self.lookup_table[message_id] = defer.Deferred()
|
||||
return d
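For illustration only, the newline-delimited JSON-RPC line produced by the method above for a hypothetical subscription call (field order may differ; the address is just an example string):

# client.rpc('blockchain.address.subscribe', b'1ExampleAddressxxxxxxxxxxxxxxxxxxx')
# sends one line like:
# {"id": 1, "method": "blockchain.address.subscribe", "params": ["1ExampleAddressxxxxxxxxxxxxxxxxxxx"]}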
|
||||
|
||||
|
||||
class StratumClientFactory(protocol.ClientFactory):
|
||||
|
||||
protocol = StratumClientProtocol
|
||||
|
||||
def __init__(self, network):
|
||||
self.network = network
|
||||
self.client = None
|
||||
|
||||
def buildProtocol(self, addr):
|
||||
client = self.protocol()
|
||||
client.factory = self
|
||||
client.network = self.network
|
||||
self.client = client
|
||||
return client
|
||||
|
||||
|
||||
class BaseNetwork:
|
||||
|
||||
def __init__(self, config):
|
||||
self.config = config
|
||||
self.client = None
|
||||
self.service = None
|
||||
self.running = False
|
||||
|
||||
self._on_connected_controller = StreamController()
|
||||
self.on_connected = self._on_connected_controller.stream
|
||||
|
||||
self._on_header_controller = StreamController()
|
||||
self.on_header = self._on_header_controller.stream
|
||||
|
||||
self._on_status_controller = StreamController()
|
||||
self.on_status = self._on_status_controller.stream
|
||||
|
||||
self.subscription_controllers = {
|
||||
b'blockchain.headers.subscribe': self._on_header_controller,
|
||||
b'blockchain.address.subscribe': self._on_status_controller,
|
||||
}
|
||||
|
||||
@defer.inlineCallbacks
|
||||
def start(self):
|
||||
self.running = True
|
||||
for server in cycle(self.config['default_servers']):
|
||||
endpoint = clientFromString(reactor, 'tcp:{}:{}'.format(*server))
|
||||
self.service = ClientService(endpoint, StratumClientFactory(self))
|
||||
self.service.startService()
|
||||
try:
|
||||
self.client = yield self.service.whenConnected(failAfterFailures=2)
|
||||
yield self.ensure_server_version()
|
||||
self._on_connected_controller.add(True)
|
||||
yield self.client.on_disconnected.first
|
||||
except CancelledError:
|
||||
return
|
||||
except Exception:
|
||||
log.exception("Connection to %s raised an exception:", server)
|
||||
finally:
|
||||
self.client = None
|
||||
if not self.running:
|
||||
return
|
||||
|
||||
def stop(self):
|
||||
self.running = False
|
||||
if self.service is not None:
|
||||
self.service.stopService()
|
||||
if self.is_connected:
|
||||
return self.client.on_disconnected.first
|
||||
else:
|
||||
return defer.succeed(True)
|
||||
|
||||
@property
|
||||
def is_connected(self):
|
||||
return self.client is not None and self.client.connected
|
||||
|
||||
def rpc(self, list_or_method, *args):
|
||||
if self.is_connected:
|
||||
return self.client.rpc(list_or_method, *args)
|
||||
else:
|
||||
raise ConnectionError("Attempting to send rpc request when connection is not available.")
|
||||
|
||||
def ensure_server_version(self, required='1.2'):
|
||||
return self.rpc('server.version', __version__, required)
|
||||
|
||||
def broadcast(self, raw_transaction):
|
||||
return self.rpc('blockchain.transaction.broadcast', raw_transaction)
|
||||
|
||||
def get_history(self, address):
|
||||
return self.rpc('blockchain.address.get_history', address)
|
||||
|
||||
def get_transaction(self, tx_hash):
|
||||
return self.rpc('blockchain.transaction.get', tx_hash)
|
||||
|
||||
def get_merkle(self, tx_hash, height):
|
||||
return self.rpc('blockchain.transaction.get_merkle', tx_hash, height)
|
||||
|
||||
def get_headers(self, height, count=10000):
|
||||
return self.rpc('blockchain.block.headers', height, count)
|
||||
|
||||
def subscribe_headers(self):
|
||||
return self.rpc('blockchain.headers.subscribe', True)
|
||||
|
||||
def subscribe_address(self, address):
|
||||
return self.rpc('blockchain.address.subscribe', address)
|
407
torba/basescript.py
Normal file
|
@ -0,0 +1,407 @@
|
|||
from itertools import chain
|
||||
from binascii import hexlify
|
||||
from collections import namedtuple
|
||||
|
||||
from torba.bcd_data_stream import BCDataStream
|
||||
from torba.util import subclass_tuple
|
||||
|
||||
# bitcoin opcodes
|
||||
OP_0 = 0x00
|
||||
OP_1 = 0x51
|
||||
OP_16 = 0x60
|
||||
OP_DUP = 0x76
|
||||
OP_HASH160 = 0xa9
|
||||
OP_EQUALVERIFY = 0x88
|
||||
OP_CHECKSIG = 0xac
|
||||
OP_CHECKMULTISIG = 0xae
|
||||
OP_EQUAL = 0x87
|
||||
OP_PUSHDATA1 = 0x4c
|
||||
OP_PUSHDATA2 = 0x4d
|
||||
OP_PUSHDATA4 = 0x4e
|
||||
OP_RETURN = 0x6a
|
||||
OP_2DROP = 0x6d
|
||||
OP_DROP = 0x75
|
||||
|
||||
|
||||
# template matching opcodes (not real opcodes)
|
||||
# base class for PUSH_DATA related opcodes
|
||||
PUSH_DATA_OP = namedtuple('PUSH_DATA_OP', 'name')
|
||||
# opcode for variable length strings
|
||||
PUSH_SINGLE = subclass_tuple('PUSH_SINGLE', PUSH_DATA_OP)
|
||||
# opcode for variable number of variable length strings
|
||||
PUSH_MANY = subclass_tuple('PUSH_MANY', PUSH_DATA_OP)
|
||||
# opcode with embedded subscript parsing
|
||||
PUSH_SUBSCRIPT = namedtuple('PUSH_SUBSCRIPT', 'name template')
|
||||
|
||||
|
||||
def is_push_data_opcode(opcode):
|
||||
return isinstance(opcode, PUSH_DATA_OP) or isinstance(opcode, PUSH_SUBSCRIPT)
|
||||
|
||||
|
||||
def is_push_data_token(token):
|
||||
return 1 <= token <= OP_PUSHDATA4
|
||||
|
||||
|
||||
def push_data(data):
|
||||
size = len(data)
|
||||
if size < OP_PUSHDATA1:
|
||||
yield BCDataStream.uint8.pack(size)
|
||||
elif size <= 0xFF:
|
||||
yield BCDataStream.uint8.pack(OP_PUSHDATA1)
|
||||
yield BCDataStream.uint8.pack(size)
|
||||
elif size <= 0xFFFF:
|
||||
yield BCDataStream.uint8.pack(OP_PUSHDATA2)
|
||||
yield BCDataStream.uint16.pack(size)
|
||||
else:
|
||||
yield BCDataStream.uint8.pack(OP_PUSHDATA4)
|
||||
yield BCDataStream.uint32.pack(size)
|
||||
yield data
|
||||
|
||||
|
||||
def read_data(token, stream):
|
||||
if token < OP_PUSHDATA1:
|
||||
return stream.read(token)
|
||||
elif token == OP_PUSHDATA1:
|
||||
return stream.read(stream.read_uint8())
|
||||
elif token == OP_PUSHDATA2:
|
||||
return stream.read(stream.read_uint16())
|
||||
else:
|
||||
return stream.read(stream.read_uint32())
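A short sketch (assuming BCDataStream from torba.bcd_data_stream) showing that push_data and read_data round-trip a payload; the 20-byte payload is arbitrary:

from torba.bcd_data_stream import BCDataStream

payload = b'\x42' * 20                     # small enough for the single length-byte form
stream = BCDataStream()
stream.write_many(push_data(payload))      # writes the length byte 0x14, then the payload
stream.reset()
token = stream.read_uint8()                # 0x14, i.e. below OP_PUSHDATA1
assert read_data(token, stream) == payload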
|
||||
|
||||
|
||||
# opcode for OP_1 - OP_16
|
||||
SMALL_INTEGER = namedtuple('SMALL_INTEGER', 'name')
|
||||
|
||||
|
||||
def is_small_integer(token):
|
||||
return OP_1 <= token <= OP_16
|
||||
|
||||
|
||||
def push_small_integer(num):
|
||||
assert 1 <= num <= 16
|
||||
yield BCDataStream.uint8.pack(OP_1 + (num - 1))
|
||||
|
||||
|
||||
def read_small_integer(token):
|
||||
return (token - OP_1) + 1
|
||||
|
||||
|
||||
class Token(namedtuple('Token', 'value')):
|
||||
__slots__ = ()
|
||||
|
||||
def __repr__(self):
|
||||
name = None
|
||||
for var_name, var_value in globals().items():
|
||||
if var_name.startswith('OP_') and var_value == self.value:
|
||||
name = var_name
|
||||
break
|
||||
return name or 'UNKNOWN_OP({})'.format(self.value)  # __repr__ must return a string
|
||||
|
||||
|
||||
class DataToken(Token):
|
||||
__slots__ = ()
|
||||
|
||||
def __repr__(self):
|
||||
return '"{}"'.format(hexlify(self.value))
|
||||
|
||||
|
||||
class SmallIntegerToken(Token):
|
||||
__slots__ = ()
|
||||
|
||||
def __repr__(self):
|
||||
return 'SmallIntegerToken({})'.format(self.value)
|
||||
|
||||
|
||||
def token_producer(source):
|
||||
token = source.read_uint8()
|
||||
while token is not None:
|
||||
if is_push_data_token(token):
|
||||
yield DataToken(read_data(token, source))
|
||||
elif is_small_integer(token):
|
||||
yield SmallIntegerToken(read_small_integer(token))
|
||||
else:
|
||||
yield Token(token)
|
||||
token = source.read_uint8()
|
||||
|
||||
|
||||
def tokenize(source):
|
||||
return list(token_producer(source))
|
||||
|
||||
|
||||
class ScriptError(Exception):
|
||||
""" General script handling error. """
|
||||
|
||||
|
||||
class ParseError(ScriptError):
|
||||
""" Script parsing error. """
|
||||
|
||||
|
||||
class Parser:
|
||||
|
||||
def __init__(self, opcodes, tokens):
|
||||
self.opcodes = opcodes
|
||||
self.tokens = tokens
|
||||
self.values = {}
|
||||
self.token_index = 0
|
||||
self.opcode_index = 0
|
||||
|
||||
def parse(self):
|
||||
while self.token_index < len(self.tokens) and self.opcode_index < len(self.opcodes):
|
||||
token = self.tokens[self.token_index]
|
||||
opcode = self.opcodes[self.opcode_index]
|
||||
if isinstance(token, DataToken):
|
||||
if isinstance(opcode, (PUSH_SINGLE, PUSH_SUBSCRIPT)):
|
||||
self.push_single(opcode, token.value)
|
||||
elif isinstance(opcode, PUSH_MANY):
|
||||
self.consume_many_non_greedy()
|
||||
else:
|
||||
raise ParseError("DataToken found but opcode was '{}'.".format(opcode))
|
||||
elif isinstance(token, SmallIntegerToken):
|
||||
if isinstance(opcode, SMALL_INTEGER):
|
||||
self.values[opcode.name] = token.value
|
||||
else:
|
||||
raise ParseError("SmallIntegerToken found but opcode was '{}'.".format(opcode))
|
||||
elif token.value == opcode:
|
||||
pass
|
||||
else:
|
||||
raise ParseError("Token is '{}' and opcode is '{}'.".format(token.value, opcode))
|
||||
self.token_index += 1
|
||||
self.opcode_index += 1
|
||||
|
||||
if self.token_index < len(self.tokens):
|
||||
raise ParseError("Parse completed without all tokens being consumed.")
|
||||
|
||||
if self.opcode_index < len(self.opcodes):
|
||||
raise ParseError("Parse completed without all opcodes being consumed.")
|
||||
|
||||
return self
|
||||
|
||||
def consume_many_non_greedy(self):
|
||||
""" Allows PUSH_MANY to consume data without being greedy
|
||||
in cases when one or more PUSH_SINGLEs follow a PUSH_MANY. This will
|
||||
prioritize giving all PUSH_SINGLEs some data and only after that
|
||||
subsume the rest into PUSH_MANY.
|
||||
"""
|
||||
|
||||
token_values = []
|
||||
while self.token_index < len(self.tokens):
|
||||
token = self.tokens[self.token_index]
|
||||
if not isinstance(token, DataToken):
|
||||
self.token_index -= 1
|
||||
break
|
||||
token_values.append(token.value)
|
||||
self.token_index += 1
|
||||
|
||||
push_opcodes = []
|
||||
push_many_count = 0
|
||||
while self.opcode_index < len(self.opcodes):
|
||||
opcode = self.opcodes[self.opcode_index]
|
||||
if not is_push_data_opcode(opcode):
|
||||
self.opcode_index -= 1
|
||||
break
|
||||
if isinstance(opcode, PUSH_MANY):
|
||||
push_many_count += 1
|
||||
push_opcodes.append(opcode)
|
||||
self.opcode_index += 1
|
||||
|
||||
if push_many_count > 1:
|
||||
raise ParseError(
|
||||
"Cannot have more than one consecutive PUSH_MANY, as there is no way to tell which"
|
||||
" token value should go into which PUSH_MANY."
|
||||
)
|
||||
|
||||
if len(push_opcodes) > len(token_values):
|
||||
raise ParseError(
|
||||
"Not enough token values to match all of the PUSH_MANY and PUSH_SINGLE opcodes."
|
||||
)
|
||||
|
||||
many_opcode = push_opcodes.pop(0)
|
||||
|
||||
# consume data into PUSH_SINGLE opcodes, working backwards
|
||||
for opcode in reversed(push_opcodes):
|
||||
self.push_single(opcode, token_values.pop())
|
||||
|
||||
# finally PUSH_MANY gets everything that's left
|
||||
self.values[many_opcode.name] = token_values
|
||||
|
||||
def push_single(self, opcode, value):
|
||||
if isinstance(opcode, PUSH_SINGLE):
|
||||
self.values[opcode.name] = value
|
||||
elif isinstance(opcode, PUSH_SUBSCRIPT):
|
||||
self.values[opcode.name] = Script.from_source_with_template(value, opcode.template)
|
||||
else:
|
||||
raise ParseError("Not a push single or subscript: {}".format(opcode))
|
||||
|
||||
|
||||
class Template(object):
|
||||
|
||||
__slots__ = 'name', 'opcodes'
|
||||
|
||||
def __init__(self, name, opcodes):
|
||||
self.name = name
|
||||
self.opcodes = opcodes
|
||||
|
||||
def parse(self, tokens):
|
||||
return Parser(self.opcodes, tokens).parse().values
|
||||
|
||||
def generate(self, values):
|
||||
source = BCDataStream()
|
||||
for opcode in self.opcodes:
|
||||
if isinstance(opcode, PUSH_SINGLE):
|
||||
data = values[opcode.name]
|
||||
source.write_many(push_data(data))
|
||||
elif isinstance(opcode, PUSH_SUBSCRIPT):
|
||||
data = values[opcode.name]
|
||||
source.write_many(push_data(data.source))
|
||||
elif isinstance(opcode, PUSH_MANY):
|
||||
for data in values[opcode.name]:
|
||||
source.write_many(push_data(data))
|
||||
elif isinstance(opcode, SMALL_INTEGER):
|
||||
data = values[opcode.name]
|
||||
source.write_many(push_small_integer(data))
|
||||
else:
|
||||
source.write_uint8(opcode)
|
||||
return source.get_bytes()
|
||||
|
||||
|
||||
class Script(object):
|
||||
|
||||
__slots__ = 'source', 'template', 'values'
|
||||
|
||||
templates = []
|
||||
|
||||
def __init__(self, source=None, template=None, values=None, template_hint=None):
|
||||
self.source = source
|
||||
self.template = template
|
||||
self.values = values
|
||||
if source:
|
||||
self.parse(template_hint)
|
||||
elif template and values:
|
||||
self.generate()
|
||||
|
||||
@property
|
||||
def tokens(self):
|
||||
return tokenize(BCDataStream(self.source))
|
||||
|
||||
@classmethod
|
||||
def from_source_with_template(cls, source, template):
|
||||
return cls(source, template_hint=template)
|
||||
|
||||
def parse(self, template_hint=None):
|
||||
tokens = self.tokens
|
||||
for template in chain((template_hint,), self.templates):
|
||||
if not template:
|
||||
continue
|
||||
try:
|
||||
self.values = template.parse(tokens)
|
||||
self.template = template
|
||||
return
|
||||
except ParseError:
|
||||
continue
|
||||
raise ValueError('No matching templates for source: {}'.format(hexlify(self.source)))
|
||||
|
||||
def generate(self):
|
||||
self.source = self.template.generate(self.values)
|
||||
|
||||
|
||||
class BaseInputScript(Script):
|
||||
""" Input / redeem script templates (aka scriptSig) """
|
||||
|
||||
__slots__ = ()
|
||||
|
||||
REDEEM_PUBKEY = Template('pubkey', (
|
||||
PUSH_SINGLE('signature'),
|
||||
))
|
||||
REDEEM_PUBKEY_HASH = Template('pubkey_hash', (
|
||||
PUSH_SINGLE('signature'), PUSH_SINGLE('pubkey')
|
||||
))
|
||||
REDEEM_SCRIPT = Template('script', (
|
||||
SMALL_INTEGER('signatures_count'), PUSH_MANY('pubkeys'), SMALL_INTEGER('pubkeys_count'),
|
||||
OP_CHECKMULTISIG
|
||||
))
|
||||
REDEEM_SCRIPT_HASH = Template('script_hash', (
|
||||
OP_0, PUSH_MANY('signatures'), PUSH_SUBSCRIPT('script', REDEEM_SCRIPT)
|
||||
))
|
||||
|
||||
templates = [
|
||||
REDEEM_PUBKEY,
|
||||
REDEEM_PUBKEY_HASH,
|
||||
REDEEM_SCRIPT_HASH,
|
||||
REDEEM_SCRIPT
|
||||
]
|
||||
|
||||
@classmethod
|
||||
def redeem_pubkey_hash(cls, signature, pubkey):
|
||||
return cls(template=cls.REDEEM_PUBKEY_HASH, values={
|
||||
'signature': signature,
|
||||
'pubkey': pubkey
|
||||
})
|
||||
|
||||
@classmethod
|
||||
def redeem_script_hash(cls, signatures, pubkeys):
|
||||
return cls(template=cls.REDEEM_SCRIPT_HASH, values={
|
||||
'signatures': signatures,
|
||||
'script': cls.redeem_script(signatures, pubkeys)
|
||||
})
|
||||
|
||||
@classmethod
|
||||
def redeem_script(cls, signatures, pubkeys):
|
||||
return cls(template=cls.REDEEM_SCRIPT, values={
|
||||
'signatures_count': len(signatures),
|
||||
'pubkeys': pubkeys,
|
||||
'pubkeys_count': len(pubkeys)
|
||||
})
|
||||
|
||||
|
||||
class BaseOutputScript(Script):
|
||||
|
||||
__slots__ = ()
|
||||
|
||||
# output / payment script templates (aka scriptPubKey)
|
||||
PAY_PUBKEY_FULL = Template('pay_pubkey_full', (
|
||||
PUSH_SINGLE('pubkey'), OP_CHECKSIG
|
||||
))
|
||||
PAY_PUBKEY_HASH = Template('pay_pubkey_hash', (
|
||||
OP_DUP, OP_HASH160, PUSH_SINGLE('pubkey_hash'), OP_EQUALVERIFY, OP_CHECKSIG
|
||||
))
|
||||
PAY_SCRIPT_HASH = Template('pay_script_hash', (
|
||||
OP_HASH160, PUSH_SINGLE('script_hash'), OP_EQUAL
|
||||
))
|
||||
RETURN_DATA = Template('return_data', (
|
||||
OP_RETURN, PUSH_SINGLE('data')
|
||||
))
|
||||
|
||||
templates = [
|
||||
PAY_PUBKEY_FULL,
|
||||
PAY_PUBKEY_HASH,
|
||||
PAY_SCRIPT_HASH,
|
||||
RETURN_DATA
|
||||
]
|
||||
|
||||
@classmethod
|
||||
def pay_pubkey_hash(cls, pubkey_hash):
|
||||
return cls(template=cls.PAY_PUBKEY_HASH, values={
|
||||
'pubkey_hash': pubkey_hash
|
||||
})
|
||||
|
||||
@classmethod
|
||||
def pay_script_hash(cls, script_hash):
|
||||
return cls(template=cls.PAY_SCRIPT_HASH, values={
|
||||
'script_hash': script_hash
|
||||
})
|
||||
|
||||
@property
|
||||
def is_pay_pubkey(self):
|
||||
return self.template.name.endswith('pay_pubkey_full')
|
||||
|
||||
@property
|
||||
def is_pay_pubkey_hash(self):
|
||||
return self.template.name.endswith('pay_pubkey_hash')
|
||||
|
||||
@property
|
||||
def is_pay_script_hash(self):
|
||||
return self.template.name.endswith('pay_script_hash')
|
||||
|
||||
@property
|
||||
def is_return_data(self):
|
||||
return self.template.name.endswith('return_data')
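An illustrative round trip through the templates above, with a made-up 20-byte pubkey hash: generate a pay-to-pubkey-hash script, then parse its raw bytes back into the same template.

pubkey_hash = b'\x00' * 20                               # placeholder hash160, not a real address
script = BaseOutputScript.pay_pubkey_hash(pubkey_hash)
assert script.template.name == 'pay_pubkey_hash'

reparsed = BaseOutputScript(script.source)               # template is re-detected from the bytes
assert reparsed.is_pay_pubkey_hash
assert reparsed.values['pubkey_hash'] == pubkey_hash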
|
287
torba/basetransaction.py
Normal file
|
@ -0,0 +1,287 @@
|
|||
import six
|
||||
import logging
|
||||
from typing import List
|
||||
from collections import namedtuple
|
||||
|
||||
from torba.basecoin import BaseCoin
|
||||
from torba.basescript import BaseInputScript, BaseOutputScript
|
||||
from torba.bcd_data_stream import BCDataStream
|
||||
from torba.hash import sha256
|
||||
from torba.account import Account
|
||||
from torba.util import ReadOnlyList
|
||||
|
||||
|
||||
log = logging.getLogger()
|
||||
|
||||
|
||||
NULL_HASH = b'\x00'*32
|
||||
|
||||
|
||||
class InputOutput(object):
|
||||
|
||||
@property
|
||||
def size(self):
|
||||
""" Size of this input / output in bytes. """
|
||||
stream = BCDataStream()
|
||||
self.serialize_to(stream)
|
||||
return len(stream.get_bytes())
|
||||
|
||||
def serialize_to(self, stream):
|
||||
raise NotImplementedError
|
||||
|
||||
|
||||
class BaseInput(InputOutput):
|
||||
|
||||
script_class = None
|
||||
|
||||
NULL_SIGNATURE = b'\x00'*72
|
||||
NULL_PUBLIC_KEY = b'\x00'*33
|
||||
|
||||
def __init__(self, output_or_txid_index, script, sequence=0xFFFFFFFF):
|
||||
if isinstance(output_or_txid_index, BaseOutput):
|
||||
self.output = output_or_txid_index # type: BaseOutput
|
||||
self.output_txid = self.output.transaction.hash
|
||||
self.output_index = self.output.index
|
||||
else:
|
||||
self.output = None # type: BaseOutput
|
||||
self.output_txid, self.output_index = output_or_txid_index
|
||||
self.sequence = sequence
|
||||
self.is_coinbase = self.output_txid == NULL_HASH
|
||||
self.coinbase = script if self.is_coinbase else None
|
||||
self.script = script if not self.is_coinbase else None # type: BaseInputScript
|
||||
|
||||
def link_output(self, output):
|
||||
assert self.output is None
|
||||
assert self.output_txid == output.transaction.hash
|
||||
assert self.output_index == output.index
|
||||
self.output = output
|
||||
|
||||
@classmethod
|
||||
def spend(cls, output):
|
||||
""" Create an input to spend the output."""
|
||||
assert output.script.is_pay_pubkey_hash, 'Attempting to spend unsupported output.'
|
||||
script = cls.script_class.redeem_pubkey_hash(cls.NULL_SIGNATURE, cls.NULL_PUBLIC_KEY)
|
||||
return cls(output, script)
|
||||
|
||||
@property
|
||||
def amount(self):
|
||||
""" Amount this input adds to the transaction. """
|
||||
if self.output is None:
|
||||
raise ValueError('Cannot get input value without referenced output.')
|
||||
return self.output.amount
|
||||
|
||||
@classmethod
|
||||
def deserialize_from(cls, stream):
|
||||
txid = stream.read(32)
|
||||
index = stream.read_uint32()
|
||||
script = stream.read_string()
|
||||
sequence = stream.read_uint32()
|
||||
return cls(
|
||||
(txid, index),
|
||||
cls.script_class(script) if not txid == NULL_HASH else script,
|
||||
sequence
|
||||
)
|
||||
|
||||
def serialize_to(self, stream, alternate_script=None):
|
||||
stream.write(self.output_txid)
|
||||
stream.write_uint32(self.output_index)
|
||||
if alternate_script is not None:
|
||||
stream.write_string(alternate_script)
|
||||
else:
|
||||
if self.is_coinbase:
|
||||
stream.write_string(self.coinbase)
|
||||
else:
|
||||
stream.write_string(self.script.source)
|
||||
stream.write_uint32(self.sequence)
|
||||
|
||||
|
||||
class BaseOutputAmountEstimator(object):
|
||||
|
||||
__slots__ = 'coin', 'output', 'fee', 'effective_amount'
|
||||
|
||||
def __init__(self, coin, txo): # type: (BaseCoin, BaseOutput) -> None
|
||||
self.coin = coin
|
||||
self.output = txo
|
||||
txi = coin.transaction_class.input_class.spend(txo)
|
||||
self.fee = coin.get_input_output_fee(txi)
|
||||
self.effective_amount = txo.amount - self.fee
|
||||
|
||||
def __lt__(self, other):
|
||||
return self.effective_amount < other.effective_amount
|
||||
|
||||
|
||||
class BaseOutput(InputOutput):
|
||||
|
||||
script_class = None
|
||||
estimator_class = BaseOutputAmountEstimator
|
||||
|
||||
def __init__(self, amount, script):
|
||||
self.amount = amount # type: int
|
||||
self.script = script # type: BaseOutputScript
|
||||
self.transaction = None # type: BaseTransaction
|
||||
self.index = None # type: int
|
||||
|
||||
def get_estimator(self, coin):
|
||||
return self.estimator_class(coin, self)
|
||||
|
||||
@classmethod
|
||||
def pay_pubkey_hash(cls, amount, pubkey_hash):
|
||||
return cls(amount, cls.script_class.pay_pubkey_hash(pubkey_hash))
|
||||
|
||||
@classmethod
|
||||
def deserialize_from(cls, stream):
|
||||
return cls(
|
||||
amount=stream.read_uint64(),
|
||||
script=cls.script_class(stream.read_string())
|
||||
)
|
||||
|
||||
def serialize_to(self, stream):
|
||||
stream.write_uint64(self.amount)
|
||||
stream.write_string(self.script.source)
|
||||
|
||||
|
||||
class BaseTransaction:
|
||||
|
||||
input_class = None
|
||||
output_class = None
|
||||
|
||||
def __init__(self, raw=None, version=1, locktime=0, height=None, is_saved=False):
|
||||
self._raw = raw
|
||||
self._hash = None
|
||||
self._id = None
|
||||
self.version = version # type: int
|
||||
self.locktime = locktime # type: int
|
||||
self.height = height # type: int
|
||||
self._inputs = [] # type: List[BaseInput]
|
||||
self._outputs = [] # type: List[BaseOutput]
|
||||
self.is_saved = is_saved # type: bool
|
||||
if raw is not None:
|
||||
self._deserialize()
|
||||
|
||||
@property
|
||||
def id(self):
|
||||
if self._id is None:
|
||||
self._id = self.hash[::-1]
|
||||
return self._id
|
||||
|
||||
@property
|
||||
def hash(self):
|
||||
if self._hash is None:
|
||||
self._hash = sha256(sha256(self.raw))
|
||||
return self._hash
|
||||
|
||||
@property
|
||||
def raw(self):
|
||||
if self._raw is None:
|
||||
self._raw = self._serialize()
|
||||
return self._raw
|
||||
|
||||
def _reset(self):
|
||||
self._id = None
|
||||
self._hash = None
|
||||
self._raw = None
|
||||
|
||||
@property
|
||||
def inputs(self): # type: () -> ReadOnlyList[BaseInput]
|
||||
return ReadOnlyList(self._inputs)
|
||||
|
||||
@property
|
||||
def outputs(self): # type: () -> ReadOnlyList[BaseOutput]
|
||||
return ReadOnlyList(self._outputs)
|
||||
|
||||
def add_inputs(self, inputs):
|
||||
self._inputs.extend(inputs)
|
||||
self._reset()
|
||||
return self
|
||||
|
||||
def add_outputs(self, outputs):
|
||||
for txo in outputs:
|
||||
txo.transaction = self
|
||||
txo.index = len(self._outputs)
|
||||
self._outputs.append(txo)
|
||||
self._reset()
|
||||
return self
|
||||
|
||||
@property
|
||||
def fee(self):
|
||||
""" Fee that will actually be paid."""
|
||||
return self.input_sum - self.output_sum
|
||||
|
||||
@property
|
||||
def size(self):
|
||||
""" Size in bytes of the entire transaction. """
|
||||
return len(self.raw)
|
||||
|
||||
@property
|
||||
def base_size(self):
|
||||
""" Size in bytes of transaction meta data and all outputs; without inputs. """
|
||||
return len(self._serialize(with_inputs=False))
|
||||
|
||||
def _serialize(self, with_inputs=True):
|
||||
stream = BCDataStream()
|
||||
stream.write_uint32(self.version)
|
||||
if with_inputs:
|
||||
stream.write_compact_size(len(self._inputs))
|
||||
for txin in self._inputs:
|
||||
txin.serialize_to(stream)
|
||||
stream.write_compact_size(len(self._outputs))
|
||||
for txout in self._outputs:
|
||||
txout.serialize_to(stream)
|
||||
stream.write_uint32(self.locktime)
|
||||
return stream.get_bytes()
|
||||
|
||||
def _serialize_for_signature(self, signing_input):
|
||||
stream = BCDataStream()
|
||||
stream.write_uint32(self.version)
|
||||
stream.write_compact_size(len(self._inputs))
|
||||
for i, txin in enumerate(self._inputs):
|
||||
if signing_input == i:
|
||||
txin.serialize_to(stream, txin.output.script.source)
|
||||
else:
|
||||
txin.serialize_to(stream, b'')
|
||||
stream.write_compact_size(len(self._outputs))
|
||||
for txout in self._outputs:
|
||||
txout.serialize_to(stream)
|
||||
stream.write_uint32(self.locktime)
|
||||
stream.write_uint32(1) # signature hash type: SIGHASH_ALL
|
||||
return stream.get_bytes()
|
||||
|
||||
def _deserialize(self):
|
||||
if self._raw is not None:
|
||||
stream = BCDataStream(self._raw)
|
||||
self.version = stream.read_uint32()
|
||||
input_count = stream.read_compact_size()
|
||||
self.add_inputs([
|
||||
self.input_class.deserialize_from(stream) for _ in range(input_count)
|
||||
])
|
||||
output_count = stream.read_compact_size()
|
||||
self.add_outputs([
|
||||
self.output_class.deserialize_from(stream) for _ in range(output_count)
|
||||
])
|
||||
self.locktime = stream.read_uint32()
|
||||
|
||||
def sign(self, account): # type: (Account) -> BaseTransaction
|
||||
for i, txi in enumerate(self._inputs):
|
||||
txo_script = txi.output.script
|
||||
if txo_script.is_pay_pubkey_hash:
|
||||
address = account.coin.hash160_to_address(txo_script.values['pubkey_hash'])
|
||||
private_key = account.get_private_key_for_address(address)
|
||||
tx = self._serialize_for_signature(i)
|
||||
txi.script.values['signature'] = private_key.sign(tx)+six.int2byte(1)
|
||||
txi.script.values['pubkey'] = private_key.public_key.pubkey_bytes
|
||||
txi.script.generate()
|
||||
self._reset()
|
||||
return self
|
||||
|
||||
def sort(self):
|
||||
# See https://github.com/kristovatlas/rfc/blob/master/bips/bip-li01.mediawiki
|
||||
self._inputs.sort(key=lambda i: (i.output_txid, i.output_index))
|
||||
self._outputs.sort(key=lambda o: (o.amount, o.script.source))
|
||||
|
||||
@property
|
||||
def input_sum(self):
|
||||
return sum(i.amount for i in self._inputs)
|
||||
|
||||
@property
|
||||
def output_sum(self):
|
||||
return sum(o.amount for o in self._outputs)
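A hedged sketch of how these pieces compose, using the concrete BTC classes added later in this commit (torba/coin/btc.py); the amount and pubkey hash are placeholders:

from torba.coin.btc import Transaction, Output

tx = Transaction().add_outputs([
    Output.pay_pubkey_hash(amount=100000, pubkey_hash=b'\x00' * 20)
])
raw = tx.raw                  # version, zero inputs, one output, locktime
assert tx.size == len(raw)
assert tx.output_sum == 100000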
|
126
torba/bcd_data_stream.py
Normal file
|
@ -0,0 +1,126 @@
|
|||
import struct
|
||||
from io import BytesIO
|
||||
|
||||
|
||||
class BCDataStream:
|
||||
|
||||
def __init__(self, data=None):
|
||||
self.data = BytesIO(data)
|
||||
|
||||
@property
|
||||
def is_at_beginning(self):
|
||||
return self.data.tell() == 0
|
||||
|
||||
def reset(self):
|
||||
self.data.seek(0)
|
||||
|
||||
def get_bytes(self):
|
||||
return self.data.getvalue()
|
||||
|
||||
def read(self, size):
|
||||
return self.data.read(size)
|
||||
|
||||
def write(self, data):
|
||||
self.data.write(data)
|
||||
|
||||
def write_many(self, many):
|
||||
self.data.writelines(many)
|
||||
|
||||
def read_string(self):
|
||||
return self.read(self.read_compact_size())
|
||||
|
||||
def write_string(self, s):
|
||||
self.write_compact_size(len(s))
|
||||
self.write(s)
|
||||
|
||||
def read_compact_size(self):
|
||||
size = self.read_uint8()
|
||||
if size < 253:
|
||||
return size
|
||||
if size == 253:
|
||||
return self.read_uint16()
|
||||
elif size == 254:
|
||||
return self.read_uint32()
|
||||
elif size == 255:
|
||||
return self.read_uint64()
|
||||
|
||||
def write_compact_size(self, size):
|
||||
if size < 253:
|
||||
self.write_uint8(size)
|
||||
elif size <= 0xFFFF:
|
||||
self.write_uint8(253)
|
||||
self.write_uint16(size)
|
||||
elif size <= 0xFFFFFFFF:
|
||||
self.write_uint8(254)
|
||||
self.write_uint32(size)
|
||||
else:
|
||||
self.write_uint8(255)
|
||||
self.write_uint64(size)
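A brief sketch of the CompactSize varint behaviour implemented by the two methods above; expected byte lengths follow directly from the branches (meant to run once the class is fully defined):

for size, encoded_length in [(252, 1), (253, 3), (0xFFFF, 3), (0x10000, 5), (0x100000000, 9)]:
    stream = BCDataStream()
    stream.write_compact_size(size)
    assert len(stream.get_bytes()) == encoded_length
    stream.reset()
    assert stream.read_compact_size() == size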
|
||||
|
||||
def read_boolean(self):
|
||||
return self.read_uint8() != 0
|
||||
|
||||
def write_boolean(self, val):
|
||||
return self.write_uint8(1 if val else 0)
|
||||
|
||||
int8 = struct.Struct('b')
|
||||
uint8 = struct.Struct('B')
|
||||
int16 = struct.Struct('<h')
|
||||
uint16 = struct.Struct('<H')
|
||||
int32 = struct.Struct('<i')
|
||||
uint32 = struct.Struct('<I')
|
||||
int64 = struct.Struct('<q')
|
||||
uint64 = struct.Struct('<Q')
|
||||
|
||||
def _read_struct(self, fmt):
|
||||
value = self.read(fmt.size)
|
||||
if len(value) > 0:
|
||||
return fmt.unpack(value)[0]
|
||||
|
||||
def read_int8(self):
|
||||
return self._read_struct(self.int8)
|
||||
|
||||
def read_uint8(self):
|
||||
return self._read_struct(self.uint8)
|
||||
|
||||
def read_int16(self):
|
||||
return self._read_struct(self.int16)
|
||||
|
||||
def read_uint16(self):
|
||||
return self._read_struct(self.uint16)
|
||||
|
||||
def read_int32(self):
|
||||
return self._read_struct(self.int32)
|
||||
|
||||
def read_uint32(self):
|
||||
return self._read_struct(self.uint32)
|
||||
|
||||
def read_int64(self):
|
||||
return self._read_struct(self.int64)
|
||||
|
||||
def read_uint64(self):
|
||||
return self._read_struct(self.uint64)
|
||||
|
||||
def write_int8(self, val):
|
||||
self.write(self.int8.pack(val))
|
||||
|
||||
def write_uint8(self, val):
|
||||
self.write(self.uint8.pack(val))
|
||||
|
||||
def write_int16(self, val):
|
||||
self.write(self.int16.pack(val))
|
||||
|
||||
def write_uint16(self, val):
|
||||
self.write(self.uint16.pack(val))
|
||||
|
||||
def write_int32(self, val):
|
||||
self.write(self.int32.pack(val))
|
||||
|
||||
def write_uint32(self, val):
|
||||
self.write(self.uint32.pack(val))
|
||||
|
||||
def write_int64(self, val):
|
||||
self.write(self.int64.pack(val))
|
||||
|
||||
def write_uint64(self, val):
|
||||
self.write(self.uint64.pack(val))
|
329
torba/bip32.py
Normal file
|
@ -0,0 +1,329 @@
|
|||
# Copyright (c) 2017, Neil Booth
|
||||
# Copyright (c) 2018, LBRY Inc.
|
||||
#
|
||||
# All rights reserved.
|
||||
#
|
||||
# See the file "LICENCE" for information about the copyright
|
||||
# and warranty status of this software.
|
||||
|
||||
""" Logic for BIP32 Hierarchical Key Derivation. """
|
||||
|
||||
import struct
|
||||
import hashlib
|
||||
from six import int2byte, byte2int, indexbytes
|
||||
|
||||
import ecdsa
|
||||
import ecdsa.ellipticcurve as EC
|
||||
import ecdsa.numbertheory as NT
|
||||
|
||||
from torba.basecoin import BaseCoin
|
||||
from torba.hash import Base58, hmac_sha512, hash160, double_sha256
|
||||
from torba.util import cachedproperty, bytes_to_int, int_to_bytes
|
||||
|
||||
|
||||
class DerivationError(Exception):
|
||||
""" Raised when an invalid derivation occurs. """
|
||||
|
||||
|
||||
class _KeyBase(object):
|
||||
""" A BIP32 Key, public or private. """
|
||||
|
||||
CURVE = ecdsa.SECP256k1
|
||||
|
||||
def __init__(self, coin, chain_code, n, depth, parent):
|
||||
if not isinstance(coin, BaseCoin):
|
||||
raise TypeError('invalid coin')
|
||||
if not isinstance(chain_code, (bytes, bytearray)):
|
||||
raise TypeError('chain code must be raw bytes')
|
||||
if len(chain_code) != 32:
|
||||
raise ValueError('invalid chain code')
|
||||
if not 0 <= n < 1 << 32:
|
||||
raise ValueError('invalid child number')
|
||||
if not 0 <= depth < 256:
|
||||
raise ValueError('invalid depth')
|
||||
if parent is not None:
|
||||
if not isinstance(parent, type(self)):
|
||||
raise TypeError('parent key has bad type')
|
||||
self.coin = coin
|
||||
self.chain_code = chain_code
|
||||
self.n = n
|
||||
self.depth = depth
|
||||
self.parent = parent
|
||||
|
||||
def _hmac_sha512(self, msg):
|
||||
""" Use SHA-512 to provide an HMAC, returned as a pair of 32-byte objects. """
|
||||
hmac = hmac_sha512(self.chain_code, msg)
|
||||
return hmac[:32], hmac[32:]
|
||||
|
||||
def _extended_key(self, ver_bytes, raw_serkey):
|
||||
""" Return the 78-byte extended key given prefix version bytes and serialized key bytes. """
|
||||
if not isinstance(ver_bytes, (bytes, bytearray)):
|
||||
raise TypeError('ver_bytes must be raw bytes')
|
||||
if len(ver_bytes) != 4:
|
||||
raise ValueError('ver_bytes must have length 4')
|
||||
if not isinstance(raw_serkey, (bytes, bytearray)):
|
||||
raise TypeError('raw_serkey must be raw bytes')
|
||||
if len(raw_serkey) != 33:
|
||||
raise ValueError('raw_serkey must have length 33')
|
||||
|
||||
return (ver_bytes + int2byte(self.depth)
|
||||
+ self.parent_fingerprint() + struct.pack('>I', self.n)
|
||||
+ self.chain_code + raw_serkey)
|
||||
|
||||
def fingerprint(self):
|
||||
""" Return the key's fingerprint as 4 bytes. """
|
||||
return self.identifier()[:4]
|
||||
|
||||
def parent_fingerprint(self):
|
||||
""" Return the parent key's fingerprint as 4 bytes. """
|
||||
return self.parent.fingerprint() if self.parent else int2byte(0)*4
|
||||
|
||||
def extended_key_string(self):
|
||||
""" Return an extended key as a base58 string. """
|
||||
return Base58.encode_check(self.extended_key())
|
||||
|
||||
|
||||
class PubKey(_KeyBase):
|
||||
""" A BIP32 public key. """
|
||||
|
||||
def __init__(self, coin, pubkey, chain_code, n, depth, parent=None):
|
||||
super(PubKey, self).__init__(coin, chain_code, n, depth, parent)
|
||||
if isinstance(pubkey, ecdsa.VerifyingKey):
|
||||
self.verifying_key = pubkey
|
||||
else:
|
||||
self.verifying_key = self._verifying_key_from_pubkey(pubkey)
|
||||
|
||||
@classmethod
|
||||
def _verifying_key_from_pubkey(cls, pubkey):
|
||||
""" Converts a 33-byte compressed pubkey into an ecdsa.VerifyingKey object. """
|
||||
if not isinstance(pubkey, (bytes, bytearray)):
|
||||
raise TypeError('pubkey must be raw bytes')
|
||||
if len(pubkey) != 33:
|
||||
raise ValueError('pubkey must be 33 bytes')
|
||||
if indexbytes(pubkey, 0) not in (2, 3):  # works on both Python 2 and 3
|
||||
raise ValueError('invalid pubkey prefix byte')
|
||||
curve = cls.CURVE.curve
|
||||
|
||||
is_odd = indexbytes(pubkey, 0) == 3
|
||||
x = bytes_to_int(pubkey[1:])
|
||||
|
||||
# p is the finite field order
|
||||
a, b, p = curve.a(), curve.b(), curve.p()
|
||||
y2 = pow(x, 3, p) + b
|
||||
assert a == 0 # Otherwise y2 += a * pow(x, 2, p)
|
||||
y = NT.square_root_mod_prime(y2 % p, p)
|
||||
if bool(y & 1) != is_odd:
|
||||
y = p - y
|
||||
point = EC.Point(curve, x, y)
|
||||
|
||||
return ecdsa.VerifyingKey.from_public_point(point, curve=cls.CURVE)
|
||||
|
||||
@cachedproperty
|
||||
def pubkey_bytes(self):
|
||||
""" Return the compressed public key as 33 bytes. """
|
||||
point = self.verifying_key.pubkey.point
|
||||
prefix = int2byte(2 + (point.y() & 1))
|
||||
padded_bytes = _exponent_to_bytes(point.x())
|
||||
return prefix + padded_bytes
|
||||
|
||||
@cachedproperty
|
||||
def address(self):
|
||||
""" The public key as a P2PKH address. """
|
||||
return self.coin.public_key_to_address(self.pubkey_bytes)
|
||||
|
||||
def ec_point(self):
|
||||
return self.verifying_key.pubkey.point
|
||||
|
||||
def child(self, n):
|
||||
""" Return the derived child extended pubkey at index N. """
|
||||
if not 0 <= n < (1 << 31):
|
||||
raise ValueError('invalid BIP32 public key child number')
|
||||
|
||||
msg = self.pubkey_bytes + struct.pack('>I', n)
|
||||
L, R = self._hmac_sha512(msg)
|
||||
|
||||
curve = self.CURVE
|
||||
L = bytes_to_int(L)
|
||||
if L >= curve.order:
|
||||
raise DerivationError
|
||||
|
||||
point = curve.generator * L + self.ec_point()
|
||||
if point == EC.INFINITY:
|
||||
raise DerivationError
|
||||
|
||||
verkey = ecdsa.VerifyingKey.from_public_point(point, curve=curve)
|
||||
|
||||
return PubKey(self.coin, verkey, R, n, self.depth + 1, self)
|
||||
|
||||
def identifier(self):
|
||||
""" Return the key's identifier as 20 bytes. """
|
||||
return hash160(self.pubkey_bytes)
|
||||
|
||||
def extended_key(self):
|
||||
""" Return a raw extended public key. """
|
||||
return self._extended_key(
|
||||
self.coin.extended_public_key_prefix,
|
||||
self.pubkey_bytes
|
||||
)
|
||||
|
||||
|
||||
class LowSValueSigningKey(ecdsa.SigningKey):
|
||||
"""
|
||||
Enforce low S values in signatures
|
||||
BIP-0062: https://github.com/bitcoin/bips/blob/master/bip-0062.mediawiki#low-s-values-in-signatures
|
||||
"""
|
||||
|
||||
def sign_number(self, number, entropy=None, k=None):
|
||||
order = self.privkey.order
|
||||
r, s = ecdsa.SigningKey.sign_number(self, number, entropy, k)
|
||||
if s > order // 2:
|
||||
s = order - s
|
||||
return r, s
|
||||
|
||||
|
||||
class PrivateKey(_KeyBase):
|
||||
"""A BIP32 private key."""
|
||||
|
||||
HARDENED = 1 << 31
|
||||
|
||||
def __init__(self, coin, privkey, chain_code, n, depth, parent=None):
|
||||
super(PrivateKey, self).__init__(coin, chain_code, n, depth, parent)
|
||||
if isinstance(privkey, ecdsa.SigningKey):
|
||||
self.signing_key = privkey
|
||||
else:
|
||||
self.signing_key = self._signing_key_from_privkey(privkey)
|
||||
|
||||
@classmethod
|
||||
def _signing_key_from_privkey(cls, private_key):
|
||||
""" Converts a 32-byte private key into an ecdsa.SigningKey object. """
|
||||
exponent = cls._private_key_secret_exponent(private_key)
|
||||
return LowSValueSigningKey.from_secret_exponent(exponent, curve=cls.CURVE)
|
||||
|
||||
@classmethod
|
||||
def _private_key_secret_exponent(cls, private_key):
|
||||
""" Return the private key as a secret exponent if it is a valid private key. """
|
||||
if not isinstance(private_key, (bytes, bytearray)):
|
||||
raise TypeError('private key must be raw bytes')
|
||||
if len(private_key) != 32:
|
||||
raise ValueError('private key must be 32 bytes')
|
||||
exponent = bytes_to_int(private_key)
|
||||
if not 1 <= exponent < cls.CURVE.order:
|
||||
raise ValueError('private key represents an invalid exponent')
|
||||
return exponent
|
||||
|
||||
@classmethod
|
||||
def from_seed(cls, coin, seed):
|
||||
# This hard-coded message string seems to be coin-independent...
|
||||
hmac = hmac_sha512(b'Bitcoin seed', seed)
|
||||
privkey, chain_code = hmac[:32], hmac[32:]
|
||||
return cls(coin, privkey, chain_code, 0, 0)
|
||||
|
||||
@cachedproperty
|
||||
def private_key_bytes(self):
|
||||
""" Return the serialized private key (no leading zero byte). """
|
||||
return _exponent_to_bytes(self.secret_exponent())
|
||||
|
||||
@cachedproperty
|
||||
def public_key(self):
|
||||
""" Return the corresponding extended public key. """
|
||||
verifying_key = self.signing_key.get_verifying_key()
|
||||
parent_pubkey = self.parent.public_key if self.parent else None
|
||||
return PubKey(self.coin, verifying_key, self.chain_code, self.n, self.depth,
|
||||
parent_pubkey)
|
||||
|
||||
def ec_point(self):
|
||||
return self.public_key.ec_point()
|
||||
|
||||
def secret_exponent(self):
|
||||
""" Return the private key as a secret exponent. """
|
||||
return self.signing_key.privkey.secret_multiplier
|
||||
|
||||
def wif(self):
|
||||
""" Return the private key encoded in Wallet Import Format. """
|
||||
return self.coin.private_key_to_wif(self.private_key_bytes)
|
||||
|
||||
def address(self):
|
||||
""" The public key as a P2PKH address. """
|
||||
return self.public_key.address
|
||||
|
||||
def child(self, n):
|
||||
""" Return the derived child extended private key at index N."""
|
||||
if not 0 <= n < (1 << 32):
|
||||
raise ValueError('invalid BIP32 private key child number')
|
||||
|
||||
if n >= self.HARDENED:
|
||||
serkey = b'\0' + self.private_key_bytes
|
||||
else:
|
||||
serkey = self.public_key.pubkey_bytes
|
||||
|
||||
msg = serkey + struct.pack('>I', n)
|
||||
L, R = self._hmac_sha512(msg)
|
||||
|
||||
curve = self.CURVE
|
||||
L = bytes_to_int(L)
|
||||
exponent = (L + bytes_to_int(self.private_key_bytes)) % curve.order
|
||||
if exponent == 0 or L >= curve.order:
|
||||
raise DerivationError
|
||||
|
||||
privkey = _exponent_to_bytes(exponent)
|
||||
|
||||
return PrivateKey(self.coin, privkey, R, n, self.depth + 1, self)
|
||||
|
||||
def sign(self, data):
|
||||
""" Produce a signature for piece of data by double hashing it and signing the hash. """
|
||||
key = self.signing_key
|
||||
digest = double_sha256(data)
|
||||
return key.sign_digest_deterministic(digest, hashlib.sha256, ecdsa.util.sigencode_der)
|
||||
|
||||
def identifier(self):
|
||||
"""Return the key's identifier as 20 bytes."""
|
||||
return self.public_key.identifier()
|
||||
|
||||
def extended_key(self):
|
||||
"""Return a raw extended private key."""
|
||||
return self._extended_key(
|
||||
self.coin.extended_private_key_prefix,
|
||||
b'\0' + self.private_key_bytes
|
||||
)
|
||||
|
||||
|
||||
def _exponent_to_bytes(exponent):
|
||||
"""Convert an exponent to 32 big-endian bytes"""
|
||||
return (int2byte(0)*32 + int_to_bytes(exponent))[-32:]
|
||||
|
||||
|
||||
def _from_extended_key(coin, ekey):
|
||||
"""Return a PubKey or PrivateKey from an extended key raw bytes."""
|
||||
if not isinstance(ekey, (bytes, bytearray)):
|
||||
raise TypeError('extended key must be raw bytes')
|
||||
if len(ekey) != 78:
|
||||
raise ValueError('extended key must have length 78')
|
||||
|
||||
depth = indexbytes(ekey, 4)
|
||||
fingerprint = ekey[5:9] # Not used
|
||||
n, = struct.unpack('>I', ekey[9:13])
|
||||
chain_code = ekey[13:45]
|
||||
|
||||
if ekey[:4] == coin.extended_public_key_prefix:
|
||||
pubkey = ekey[45:]
|
||||
key = PubKey(coin, pubkey, chain_code, n, depth)
|
||||
elif ekey[:4] == coin.extended_private_key_prefix:
|
||||
if indexbytes(ekey, 45) != 0:
|
||||
raise ValueError('invalid extended private key prefix byte')
|
||||
privkey = ekey[46:]
|
||||
key = PrivateKey(coin, privkey, chain_code, n, depth)
|
||||
else:
|
||||
raise ValueError('version bytes unrecognised')
|
||||
|
||||
return key
|
||||
|
||||
|
||||
def from_extended_key_string(coin, ekey_str):
|
||||
"""Given an extended key string, such as
|
||||
|
||||
xpub6BsnM1W2Y7qLMiuhi7f7dbAwQZ5Cz5gYJCRzTNainXzQXYjFwtuQXHd
|
||||
3qfi3t3KJtHxshXezfjft93w4UE7BGMtKwhqEHae3ZA7d823DVrL
|
||||
|
||||
return a PubKey or PrivateKey.
|
||||
"""
|
||||
return _from_extended_key(coin, Base58.decode_check(ekey_str))
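A usage sketch for the key classes above, assuming the BTC coin defined in torba/coin/btc.py and that a placeholder ledger is acceptable for pure key derivation; the seed is a throwaway example, not a recommended way to generate keys:

from torba.coin.btc import BTC
from torba.bip32 import PrivateKey, from_extended_key_string

coin = BTC(ledger=None)                     # assumption: no ledger needed just to derive keys
master = PrivateKey.from_seed(coin, b'insecure example seed')
child_priv = master.child(0)                # non-hardened child m/0
xpub = master.public_key.extended_key_string()
child_pub = from_extended_key_string(coin, xpub).child(0)
assert child_priv.public_key.pubkey_bytes == child_pub.pubkey_bytes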
|
1
torba/coin/__init__.py
Normal file
|
@ -0,0 +1 @@
|
|||
__path__ = __import__('pkgutil').extend_path(__path__, __name__)
|
43
torba/coin/btc.py
Normal file
|
@ -0,0 +1,43 @@
|
|||
from six import int2byte
|
||||
from binascii import unhexlify
|
||||
from torba.baseledger import BaseLedger
|
||||
from torba.basenetwork import BaseNetwork
|
||||
from torba.basescript import BaseInputScript, BaseOutputScript
|
||||
from torba.basetransaction import BaseTransaction, BaseInput, BaseOutput
|
||||
from torba.basecoin import BaseCoin
|
||||
|
||||
|
||||
class Ledger(BaseLedger):
|
||||
network_class = BaseNetwork
|
||||
|
||||
|
||||
class Input(BaseInput):
|
||||
script_class = BaseInputScript
|
||||
|
||||
|
||||
class Output(BaseOutput):
|
||||
script_class = BaseOutputScript
|
||||
|
||||
|
||||
class Transaction(BaseTransaction):
|
||||
input_class = Input
|
||||
output_class = Output
|
||||
|
||||
|
||||
class BTC(BaseCoin):
|
||||
name = 'Bitcoin'
|
||||
symbol = 'BTC'
|
||||
network = 'mainnet'
|
||||
|
||||
ledger_class = Ledger
|
||||
transaction_class = Transaction
|
||||
|
||||
pubkey_address_prefix = int2byte(0x00)
|
||||
script_address_prefix = int2byte(0x05)
|
||||
extended_public_key_prefix = unhexlify('0488b21e')
|
||||
extended_private_key_prefix = unhexlify('0488ade4')
|
||||
|
||||
default_fee_per_byte = 50
|
||||
|
||||
def __init__(self, ledger, fee_per_byte=default_fee_per_byte):
|
||||
super(BTC, self).__init__(ledger, fee_per_byte)
|
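
The prefix constants above are what give serialized BTC keys and addresses their familiar leading characters. A small illustrative check (editorial sketch; the zero-filled payload is a dummy, not a real key):

    from torba.coin.btc import BTC
    from torba.hash import Base58

    # 78-byte extended-key layout with a dummy body; only the version bytes matter here
    fake_payload = BTC.extended_public_key_prefix + b'\x00' * 74
    print(Base58.encode_check(fake_payload)[:4])   # expected: b'xpub'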

95
torba/coinselection.py
Normal file

@@ -0,0 +1,95 @@

import six
from random import Random
from typing import List

from torba.basetransaction import BaseOutputAmountEstimator

MAXIMUM_TRIES = 100000


class CoinSelector:

    def __init__(self, txos, target, cost_of_change, seed=None):
        # type: (List[BaseOutputAmountEstimator], int, int, str) -> None
        self.txos = txos
        self.target = target
        self.cost_of_change = cost_of_change
        self.exact_match = False
        self.tries = 0
        self.available = sum(c.effective_amount for c in self.txos)
        self.random = Random(seed)
        if six.PY3 and seed is not None:
            self.random.seed(seed, version=1)

    def select(self):
        if not self.txos:
            return
        if self.target > self.available:
            return
        return self.branch_and_bound() or self.single_random_draw()

    def branch_and_bound(self):
        # see bitcoin implementation for more info:
        # https://github.com/bitcoin/bitcoin/blob/master/src/wallet/coinselection.cpp

        self.txos.sort(reverse=True)

        current_value = 0
        current_available_value = self.available
        current_selection = []
        best_waste = self.cost_of_change
        best_selection = []

        while self.tries < MAXIMUM_TRIES:
            self.tries += 1

            backtrack = False
            if current_value + current_available_value < self.target or \
                    current_value > self.target + self.cost_of_change:
                backtrack = True
            elif current_value >= self.target:
                new_waste = current_value - self.target
                if new_waste <= best_waste:
                    best_waste = new_waste
                    best_selection = current_selection[:]
                backtrack = True

            if backtrack:
                while current_selection and not current_selection[-1]:
                    current_selection.pop()
                    current_available_value += self.txos[len(current_selection)].effective_amount

                if not current_selection:
                    break

                current_selection[-1] = False
                utxo = self.txos[len(current_selection) - 1]
                current_value -= utxo.effective_amount

            else:
                utxo = self.txos[len(current_selection)]
                current_available_value -= utxo.effective_amount
                previous_utxo = self.txos[len(current_selection) - 1] if current_selection else None
                if current_selection and not current_selection[-1] and \
                        utxo.effective_amount == previous_utxo.effective_amount and \
                        utxo.fee == previous_utxo.fee:
                    current_selection.append(False)
                else:
                    current_selection.append(True)
                    current_value += utxo.effective_amount

        if best_selection:
            self.exact_match = True
            return [
                self.txos[i] for i, include in enumerate(best_selection) if include
            ]

    def single_random_draw(self):
        self.random.shuffle(self.txos, self.random.random)
        selection = []
        amount = 0
        for coin in self.txos:
            selection.append(coin)
            amount += coin.effective_amount
            if amount >= self.target + self.cost_of_change:
                return selection
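
branch_and_bound() above mirrors Bitcoin Core's coin selection: it searches for a subset whose value lands in the window [target, target + cost_of_change] and otherwise the caller falls back to a single random draw. A hedged sketch of driving it with a stand-in txo object (FakeTxo is hypothetical; the real interface is BaseOutputAmountEstimator from torba/basetransaction.py, which is not shown here):

    from torba.coinselection import CoinSelector

    class FakeTxo:
        # stand-in exposing only the attributes CoinSelector reads
        def __init__(self, amount, fee=0):
            self.effective_amount = amount
            self.fee = fee
        def __lt__(self, other):
            # branch_and_bound() sorts the txos, so they must be orderable
            return self.effective_amount < other.effective_amount

    txos = [FakeTxo(5), FakeTxo(10), FakeTxo(20)]
    selector = CoinSelector(txos, target=18, cost_of_change=2, seed='example')
    picked = selector.select()
    print([t.effective_amount for t in picked])   # [20] -- an exact-window match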

3
torba/constants.py
Normal file

@@ -0,0 +1,3 @@

CENT = 1000000
COIN = 100*CENT
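
A one-line arithmetic check of the denominations (editorial sketch):

    from torba.constants import COIN, CENT

    amount = 2 * COIN + 50 * CENT   # 2.5 coins expressed in base units
    assert amount == 250000000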

180
torba/hash.py
Normal file

@@ -0,0 +1,180 @@

# Copyright (c) 2016-2017, Neil Booth
# Copyright (c) 2018, LBRY Inc.
#
# All rights reserved.
#
# See the file "LICENCE" for information about the copyright
# and warranty status of this software.

""" Cryptography hash functions and related classes. """

import os
import six
import base64
import hashlib
import hmac
from binascii import hexlify, unhexlify
from cryptography.hazmat.primitives.ciphers import Cipher, modes
from cryptography.hazmat.primitives.ciphers.algorithms import AES
from cryptography.hazmat.primitives.padding import PKCS7
from cryptography.hazmat.backends import default_backend

from torba.util import bytes_to_int, int_to_bytes

_sha256 = hashlib.sha256
_sha512 = hashlib.sha512
_new_hash = hashlib.new
_new_hmac = hmac.new


def sha256(x):
    """ Simple wrapper of hashlib sha256. """
    return _sha256(x).digest()


def sha512(x):
    """ Simple wrapper of hashlib sha512. """
    return _sha512(x).digest()


def ripemd160(x):
    """ Simple wrapper of hashlib ripemd160. """
    h = _new_hash('ripemd160')
    h.update(x)
    return h.digest()


def pow_hash(x):
    r = sha512(double_sha256(x))
    r1 = ripemd160(r[:len(r) // 2])
    r2 = ripemd160(r[len(r) // 2:])
    r3 = double_sha256(r1 + r2)
    return r3


def double_sha256(x):
    """ SHA-256 of SHA-256, as used extensively in bitcoin. """
    return sha256(sha256(x))


def hmac_sha512(key, msg):
    """ Use SHA-512 to provide an HMAC. """
    return _new_hmac(key, msg, _sha512).digest()


def hash160(x):
    """ RIPEMD-160 of SHA-256.
        Used to make bitcoin addresses from pubkeys. """
    return ripemd160(sha256(x))


def hash_to_hex_str(x):
    """ Convert a big-endian binary hash to a displayed hex string.
        Display form of a binary hash is reversed and converted to hex. """
    return hexlify(x[::-1])


def hex_str_to_hash(x):
    """ Convert a displayed hex string to a binary hash. """
    return unhexlify(x)[::-1]


def aes_encrypt(secret, value):
    key = double_sha256(secret)
    init_vector = os.urandom(16)
    encryptor = Cipher(AES(key), modes.CBC(init_vector), default_backend()).encryptor()
    padder = PKCS7(AES.block_size).padder()
    padded_data = padder.update(value) + padder.finalize()
    encrypted_data = encryptor.update(padded_data) + encryptor.finalize()
    # prepend the initialization vector so aes_decrypt() can recover it
    return base64.b64encode(init_vector + encrypted_data)


def aes_decrypt(secret, value):
    data = base64.b64decode(value)
    key = double_sha256(secret)
    init_vector, data = data[:16], data[16:]
    decryptor = Cipher(AES(key), modes.CBC(init_vector), default_backend()).decryptor()
    unpadder = PKCS7(AES.block_size).unpadder()
    result = unpadder.update(decryptor.update(data)) + unpadder.finalize()
    return result


class Base58Error(Exception):
    """ Exception used for Base58 errors. """


class Base58(object):
    """ Class providing base 58 functionality. """

    chars = u'123456789ABCDEFGHJKLMNPQRSTUVWXYZabcdefghijkmnopqrstuvwxyz'
    assert len(chars) == 58
    char_map = {c: n for n, c in enumerate(chars)}

    @classmethod
    def char_value(cls, c):
        val = cls.char_map.get(c)
        if val is None:
            raise Base58Error('invalid base 58 character "{}"'.format(c))
        return val

    @classmethod
    def decode(cls, txt):
        """ Decodes txt into a big-endian bytearray. """
        if isinstance(txt, six.binary_type):
            txt = txt.decode()

        if not isinstance(txt, six.text_type):
            raise TypeError('a string is required')

        if not txt:
            raise Base58Error('string cannot be empty')

        value = 0
        for c in txt:
            value = value * 58 + cls.char_value(c)

        result = int_to_bytes(value)

        # Prepend leading zero bytes if necessary
        count = 0
        for c in txt:
            if c != u'1':
                break
            count += 1
        if count:
            result = six.int2byte(0) * count + result

        return result

    @classmethod
    def encode(cls, be_bytes):
        """ Converts a big-endian bytearray into a base58 string. """
        value = bytes_to_int(be_bytes)

        txt = u''
        while value:
            value, mod = divmod(value, 58)
            txt += cls.chars[mod]

        for byte in be_bytes:
            if byte != 0:
                break
            txt += u'1'

        return txt[::-1].encode()

    @classmethod
    def decode_check(cls, txt, hash_fn=double_sha256):
        """ Decodes a Base58Check-encoded string to a payload. The version prefixes it. """
        be_bytes = cls.decode(txt)
        result, check = be_bytes[:-4], be_bytes[-4:]
        if check != hash_fn(result)[:4]:
            raise Base58Error('invalid base 58 checksum for {}'.format(txt))
        return result

    @classmethod
    def encode_check(cls, payload, hash_fn=double_sha256):
        """ Encodes a payload bytearray (which includes the version byte(s))
            into a Base58Check string. """
        be_bytes = payload + hash_fn(payload)[:4]
        return cls.encode(be_bytes)
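
The helpers above combine into the usual Base58Check address flow and a symmetric-encryption round trip. An editorial sketch using dummy inputs (the 33-byte pubkey is fake, not a real key):

    from torba.hash import hash160, Base58, aes_encrypt, aes_decrypt

    pubkey = b'\x02' + b'\x11' * 32                  # fake compressed public key
    payload = b'\x00' + hash160(pubkey)              # 0x00 = BTC pubkey_address_prefix
    address = Base58.encode_check(payload)           # base58 bytes with a 4-byte checksum
    assert Base58.decode_check(address) == payload   # decode_check() re-verifies the checksum

    token = aes_encrypt(b'passphrase', b'secret message')
    assert aes_decrypt(b'passphrase', token) == b'secret message'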

83
torba/manager.py
Normal file

@@ -0,0 +1,83 @@

import functools
from typing import List, Dict, Type
from twisted.internet import defer

from torba.account import AccountsView
from torba.basecoin import CoinRegistry
from torba.baseledger import BaseLedger
from torba.wallet import Wallet, WalletStorage


class WalletManager:

    def __init__(self, wallets=None, ledgers=None):
        self.wallets = wallets or []  # type: List[Wallet]
        self.ledgers = ledgers or {}  # type: Dict[Type[BaseLedger], BaseLedger]
        self.running = False

    @classmethod
    def from_config(cls, config):
        wallets = []
        manager = cls(wallets)
        for coin_id, ledger_config in config.get('ledgers', {}).items():
            manager.get_or_create_ledger(coin_id, ledger_config)
        for wallet_path in config.get('wallets', []):
            wallet_storage = WalletStorage(wallet_path)
            wallet = Wallet.from_storage(wallet_storage, manager)
            wallets.append(wallet)
        return manager

    def get_or_create_ledger(self, coin_id, ledger_config=None):
        coin_class = CoinRegistry.get_coin_class(coin_id)
        ledger_class = coin_class.ledger_class
        ledger = self.ledgers.get(ledger_class)
        if ledger is None:
            ledger = ledger_class(self.get_accounts_view(coin_class), ledger_config or {})
            self.ledgers[ledger_class] = ledger
        return ledger

    @property
    def default_wallet(self):
        for wallet in self.wallets:
            return wallet

    @property
    def default_account(self):
        for wallet in self.wallets:
            return wallet.default_account

    def get_accounts(self, coin_class):
        for wallet in self.wallets:
            for account in wallet.accounts:
                if account.coin.__class__ is coin_class:
                    yield account

    def get_accounts_view(self, coin_class):
        return AccountsView(
            functools.partial(self.get_accounts, coin_class)
        )

    def create_wallet(self, path, coin_class):
        storage = WalletStorage(path)
        wallet = Wallet.from_storage(storage, self)
        self.wallets.append(wallet)
        self.create_account(wallet, coin_class)
        return wallet

    def create_account(self, wallet, coin_class):
        ledger = self.get_or_create_ledger(coin_class.get_id())
        return wallet.generate_account(ledger)

    @defer.inlineCallbacks
    def start_ledgers(self):
        self.running = True
        yield defer.DeferredList([
            l.start() for l in self.ledgers.values()
        ])

    @defer.inlineCallbacks
    def stop_ledgers(self):
        yield defer.DeferredList([
            l.stop() for l in self.ledgers.values()
        ])
        self.running = False
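
WalletManager.from_config() expects a dict with per-coin ledger settings and a list of wallet file paths. A hedged sketch (the coin id 'btc_mainnet' and the wallet path are assumptions, and the start/stop calls need a running Twisted reactor):

    from twisted.internet import defer
    from torba.manager import WalletManager

    config = {
        'ledgers': {'btc_mainnet': {}},          # per-coin ledger settings (assumed coin id)
        'wallets': ['/path/to/wallet.json'],     # missing files fall back to default wallet data
    }
    manager = WalletManager.from_config(config)

    @defer.inlineCallbacks
    def run():
        yield manager.start_ledgers()
        # ... use manager.default_wallet / manager.default_account ...
        yield manager.stop_ledgers()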

163
torba/mnemonic.py
Normal file

@@ -0,0 +1,163 @@

# Copyright (C) 2014 Thomas Voegtlin
# Copyright (C) 2018 LBRY Inc.

import os
import io
import hmac
import math
import hashlib
import unicodedata
import string
from binascii import hexlify

import ecdsa
import pbkdf2

from torba.hash import hmac_sha512

# The hash of the mnemonic seed must begin with this
SEED_PREFIX = b'01'       # Standard wallet
SEED_PREFIX_2FA = b'101'  # Two-factor authentication
SEED_PREFIX_SW = b'100'   # Segwit wallet

# http://www.asahi-net.or.jp/~ax2s-kmtn/ref/unicode/e_asia.html
CJK_INTERVALS = [
    (0x4E00, 0x9FFF, 'CJK Unified Ideographs'),
    (0x3400, 0x4DBF, 'CJK Unified Ideographs Extension A'),
    (0x20000, 0x2A6DF, 'CJK Unified Ideographs Extension B'),
    (0x2A700, 0x2B73F, 'CJK Unified Ideographs Extension C'),
    (0x2B740, 0x2B81F, 'CJK Unified Ideographs Extension D'),
    (0xF900, 0xFAFF, 'CJK Compatibility Ideographs'),
    (0x2F800, 0x2FA1D, 'CJK Compatibility Ideographs Supplement'),
    (0x3190, 0x319F, 'Kanbun'),
    (0x2E80, 0x2EFF, 'CJK Radicals Supplement'),
    (0x2F00, 0x2FDF, 'CJK Radicals'),
    (0x31C0, 0x31EF, 'CJK Strokes'),
    (0x2FF0, 0x2FFF, 'Ideographic Description Characters'),
    (0xE0100, 0xE01EF, 'Variation Selectors Supplement'),
    (0x3100, 0x312F, 'Bopomofo'),
    (0x31A0, 0x31BF, 'Bopomofo Extended'),
    (0xFF00, 0xFFEF, 'Halfwidth and Fullwidth Forms'),
    (0x3040, 0x309F, 'Hiragana'),
    (0x30A0, 0x30FF, 'Katakana'),
    (0x31F0, 0x31FF, 'Katakana Phonetic Extensions'),
    (0x1B000, 0x1B0FF, 'Kana Supplement'),
    (0xAC00, 0xD7AF, 'Hangul Syllables'),
    (0x1100, 0x11FF, 'Hangul Jamo'),
    (0xA960, 0xA97F, 'Hangul Jamo Extended A'),
    (0xD7B0, 0xD7FF, 'Hangul Jamo Extended B'),
    (0x3130, 0x318F, 'Hangul Compatibility Jamo'),
    (0xA4D0, 0xA4FF, 'Lisu'),
    (0x16F00, 0x16F9F, 'Miao'),
    (0xA000, 0xA48F, 'Yi Syllables'),
    (0xA490, 0xA4CF, 'Yi Radicals'),
]


def is_cjk(c):
    n = ord(c)
    for start, end, name in CJK_INTERVALS:
        if start <= n <= end:
            return True
    return False


def normalize_text(seed):
    seed = unicodedata.normalize('NFKD', seed)
    seed = seed.lower()
    # remove accents
    seed = u''.join([c for c in seed if not unicodedata.combining(c)])
    # normalize whitespaces
    seed = u' '.join(seed.split())
    # remove whitespaces between CJK
    seed = u''.join([
        seed[i] for i in range(len(seed))
        if not (seed[i] in string.whitespace and is_cjk(seed[i-1]) and is_cjk(seed[i+1]))
    ])
    return seed


def load_words(filename):
    path = os.path.join(os.path.dirname(__file__), 'words', filename)
    with io.open(path, 'r', encoding='utf-8') as f:
        s = f.read().strip()
    s = unicodedata.normalize('NFKD', s)
    lines = s.split('\n')
    words = []
    for line in lines:
        line = line.split('#')[0]
        line = line.strip(' \r')
        assert ' ' not in line
        if line:
            words.append(line)
    return words


file_names = {
    'en': 'english.txt',
    'es': 'spanish.txt',
    'ja': 'japanese.txt',
    'pt': 'portuguese.txt',
    'zh': 'chinese_simplified.txt'
}


class Mnemonic(object):
    # Seed derivation no longer follows BIP39
    # Mnemonic phrase uses a hash based checksum, instead of a words-dependent checksum

    def __init__(self, lang='en'):
        filename = file_names.get(lang, 'english.txt')
        self.words = load_words(filename)

    @classmethod
    def mnemonic_to_seed(cls, mnemonic, passphrase=u''):
        PBKDF2_ROUNDS = 2048
        mnemonic = normalize_text(mnemonic)
        passphrase = normalize_text(passphrase)
        return pbkdf2.PBKDF2(
            mnemonic, passphrase, iterations=PBKDF2_ROUNDS,
            macmodule=hmac, digestmodule=hashlib.sha512
        ).read(64)

    def mnemonic_encode(self, i):
        n = len(self.words)
        words = []
        while i:
            x = i % n
            i = i // n
            words.append(self.words[x])
        return ' '.join(words)

    def mnemonic_decode(self, seed):
        n = len(self.words)
        words = seed.split()
        i = 0
        while words:
            w = words.pop()
            k = self.words.index(w)
            i = i*n + k
        return i

    def make_seed(self, prefix=SEED_PREFIX, num_bits=132):
        # increase num_bits in order to obtain a uniform distribution for the last word
        bpw = math.log(len(self.words), 2)
        # rounding
        n = int(math.ceil(num_bits/bpw) * bpw)
        entropy = 1
        while entropy < pow(2, n - bpw):
            # try again if seed would not contain enough words
            entropy = ecdsa.util.randrange(pow(2, n))
        nonce = 0
        while True:
            nonce += 1
            i = entropy + nonce
            seed = self.mnemonic_encode(i)
            if i != self.mnemonic_decode(seed):
                raise Exception('Cannot extract same entropy from mnemonic!')
            if is_new_seed(seed, prefix):
                break
        return seed


def is_new_seed(seed, prefix):
    seed = normalize_text(seed)
    seed_hash = hexlify(hmac_sha512(b"seed version", seed.encode('utf8')))
    return seed_hash.startswith(prefix)
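
A short usage sketch (editorial); it requires the word lists under torba/words/ to be available next to the module:

    from torba.mnemonic import Mnemonic

    m = Mnemonic('en')
    phrase = m.make_seed()                          # phrase whose seed hash starts with '01'
    root_seed = Mnemonic.mnemonic_to_seed(phrase)   # 64-byte PBKDF2-HMAC-SHA512 stretch
    print(len(phrase.split()), len(root_seed))      # 12 64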

96
torba/msqr.py
Normal file

@@ -0,0 +1,96 @@

# from http://eli.thegreenplace.net/2009/03/07/computing-modular-square-roots-in-python/


def modular_sqrt(a, p):
    """ Find a quadratic residue (mod p) of 'a'. p
        must be an odd prime.

        Solve the congruence of the form:
            x^2 = a (mod p)
        and return x. Note that p - x is also a root.

        0 is returned if no square root exists for
        these a and p.

        The Tonelli-Shanks algorithm is used (except
        for some simple cases in which the solution
        is known from an identity). This algorithm
        runs in polynomial time (unless the
        generalized Riemann hypothesis is false).
    """
    # Simple cases
    #
    if legendre_symbol(a, p) != 1:
        return 0
    elif a == 0:
        return 0
    elif p == 2:
        return p
    elif p % 4 == 3:
        return pow(a, (p + 1) // 4, p)

    # Partition p-1 to s * 2^e for an odd s (i.e.
    # reduce all the powers of 2 from p-1)
    #
    s = p - 1
    e = 0
    while s % 2 == 0:
        s //= 2
        e += 1

    # Find some 'n' with a legendre symbol n|p = -1.
    # Shouldn't take long.
    #
    n = 2
    while legendre_symbol(n, p) != -1:
        n += 1

    # Here be dragons!
    # Read the paper "Square roots from 1; 24, 51,
    # 10 to Dan Shanks" by Ezra Brown for more
    # information
    #

    # x is a guess of the square root that gets better
    # with each iteration.
    # b is the "fudge factor" - by how much we're off
    # with the guess. The invariant x^2 = ab (mod p)
    # is maintained throughout the loop.
    # g is used for successive powers of n to update
    # both a and b
    # r is the exponent - decreases with each update
    #
    x = pow(a, (s + 1) // 2, p)
    b = pow(a, s, p)
    g = pow(n, s, p)
    r = e

    while True:
        t = b
        m = 0
        for m in range(r):
            if t == 1:
                break
            t = pow(t, 2, p)

        if m == 0:
            return x

        gs = pow(g, 2 ** (r - m - 1), p)
        g = (gs * gs) % p
        x = (x * gs) % p
        b = (b * g) % p
        r = m


def legendre_symbol(a, p):
    """ Compute the Legendre symbol a|p using
        Euler's criterion. p is a prime, a is
        relatively prime to p (if p divides
        a, then a|p = 0)

        Returns 1 if a has a square root modulo
        p, -1 otherwise.
    """
    ls = pow(a, (p - 1) // 2, p)
    return -1 if ls == p - 1 else ls
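
A quick numeric check of the Tonelli-Shanks helper (6^2 = 36 = 10 mod 13, so 6 and 7 are the two roots):

    from torba.msqr import modular_sqrt, legendre_symbol

    root = modular_sqrt(10, 13)
    assert root in (6, 7) and (root * root) % 13 == 10
    assert legendre_symbol(10, 13) == 1    # 10 is a quadratic residue mod 13
    assert legendre_symbol(5, 13) == -1    # 5 is not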

144
torba/stream.py
Normal file

@@ -0,0 +1,144 @@

from twisted.internet.defer import Deferred, DeferredLock, maybeDeferred, inlineCallbacks
from twisted.python.failure import Failure


def execute_serially(f):
    _lock = DeferredLock()

    @inlineCallbacks
    def allow_only_one_at_a_time(*args, **kwargs):
        yield _lock.acquire()
        allow_only_one_at_a_time.is_running = True
        try:
            yield maybeDeferred(f, *args, **kwargs)
        finally:
            allow_only_one_at_a_time.is_running = False
            _lock.release()

    allow_only_one_at_a_time.is_running = False
    return allow_only_one_at_a_time


class BroadcastSubscription:

    def __init__(self, controller, on_data, on_error, on_done):
        self._controller = controller
        self._previous = self._next = None
        self._on_data = on_data
        self._on_error = on_error
        self._on_done = on_done
        self.is_paused = False
        self.is_canceled = False
        self.is_closed = False

    def pause(self):
        self.is_paused = True

    def resume(self):
        self.is_paused = False

    def cancel(self):
        self._controller._cancel(self)
        self.is_canceled = True

    @property
    def can_fire(self):
        return not any((self.is_paused, self.is_canceled, self.is_closed))

    def _add(self, data):
        if self.can_fire and self._on_data is not None:
            self._on_data(data)

    def _add_error(self, error, traceback):
        if self.can_fire and self._on_error is not None:
            self._on_error(error, traceback)

    def _close(self):
        if self.can_fire and self._on_done is not None:
            self._on_done()
        self.is_closed = True


class StreamController:

    def __init__(self):
        self.stream = Stream(self)
        self._first_subscription = None
        self._last_subscription = None

    @property
    def has_listener(self):
        return self._first_subscription is not None

    @property
    def _iterate_subscriptions(self):
        next = self._first_subscription
        while next is not None:
            subscription = next
            next = next._next
            yield subscription

    def add(self, event):
        for subscription in self._iterate_subscriptions:
            subscription._add(event)

    def add_error(self, error, traceback):
        for subscription in self._iterate_subscriptions:
            subscription._add_error(error, traceback)

    def close(self):
        for subscription in self._iterate_subscriptions:
            subscription._close()

    def _cancel(self, subscription):
        previous = subscription._previous
        next = subscription._next
        if previous is None:
            self._first_subscription = next
        else:
            previous._next = next
        if next is None:
            self._last_subscription = previous
        else:
            next._previous = previous
        subscription._next = subscription._previous = subscription

    def _listen(self, on_data, on_error, on_done):
        subscription = BroadcastSubscription(self, on_data, on_error, on_done)
        old_last = self._last_subscription
        self._last_subscription = subscription
        subscription._previous = old_last
        subscription._next = None
        if old_last is None:
            self._first_subscription = subscription
        else:
            old_last._next = subscription
        return subscription


class Stream:

    def __init__(self, controller):
        self._controller = controller

    def listen(self, on_data, on_error=None, on_done=None):
        return self._controller._listen(on_data, on_error, on_done)

    @property
    def first(self):
        deferred = Deferred()
        subscription = self.listen(
            lambda value: self._cancel_and_callback(subscription, deferred, value),
            lambda error, traceback: self._cancel_and_error(subscription, deferred, error, traceback)
        )
        return deferred

    @staticmethod
    def _cancel_and_callback(subscription, deferred, value):
        subscription.cancel()
        deferred.callback(value)

    @staticmethod
    def _cancel_and_error(subscription, deferred, error, traceback):
        subscription.cancel()
        deferred.errback(Failure(error, exc_tb=traceback))
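
A minimal illustration of the controller/stream/subscription primitives above (editorial sketch):

    from torba.stream import StreamController

    controller = StreamController()
    seen = []
    subscription = controller.stream.listen(seen.append)
    controller.add('hello')
    controller.add('world')
    subscription.cancel()
    controller.add('ignored')   # the only subscription is gone, so nothing fires
    print(seen)                 # ['hello', 'world']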

60
torba/util.py
Normal file

@@ -0,0 +1,60 @@

from binascii import unhexlify, hexlify
from collections import Sequence


class ReadOnlyList(Sequence):

    def __init__(self, lst):
        self.lst = lst

    def __getitem__(self, key):
        return self.lst[key]

    def __len__(self):
        return len(self.lst)


def subclass_tuple(name, base):
    return type(name, (base,), {'__slots__': ()})


class cachedproperty(object):

    def __init__(self, f):
        self.f = f

    def __get__(self, obj, type):
        obj = obj or type
        value = self.f(obj)
        setattr(obj, self.f.__name__, value)
        return value


def bytes_to_int(be_bytes):
    """ Interprets a big-endian sequence of bytes as an integer. """
    return int(hexlify(be_bytes), 16)


def int_to_bytes(value):
    """ Converts an integer to a big-endian sequence of bytes. """
    length = (value.bit_length() + 7) // 8
    h = '%x' % value
    return unhexlify(('0' * (len(h) % 2) + h).zfill(length * 2))


def rev_hex(s):
    return hexlify(unhexlify(s)[::-1])


def int_to_hex(i, length=1):
    s = hex(i)[2:].rstrip('L')
    s = "0" * (2 * length - len(s)) + s
    return rev_hex(s)


def hex_to_int(s):
    return int(b'0x' + hexlify(s[::-1]), 16)


def hash_encode(x):
    return hexlify(x[::-1])
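
Round-trip checks for the big-endian and byte-reversed helpers (editorial sketch):

    from torba.util import bytes_to_int, int_to_bytes, int_to_hex

    assert bytes_to_int(b'\x01\x00') == 256
    assert int_to_bytes(256) == b'\x01\x00'
    assert int_to_hex(1, length=2) == b'0100'   # byte-reversed hex, as bitcoin serializes integers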

164
torba/wallet.py
Normal file

@@ -0,0 +1,164 @@

import stat
import json
import os
from typing import List, Dict

from torba.account import Account
from torba.basecoin import CoinRegistry, BaseCoin
from torba.baseledger import BaseLedger


def inflate_coin(manager, coin_id, coin_dict):
    # type: ('WalletManager', str, Dict) -> BaseCoin
    coin_class = CoinRegistry.get_coin_class(coin_id)
    ledger = manager.get_or_create_ledger(coin_id)
    return coin_class(ledger, **coin_dict)


class Wallet:
    """ The primary role of Wallet is to encapsulate a collection
        of accounts (seed/private keys) and the spending rules / settings
        for the coins attached to those accounts. Wallets are represented
        by physical files on the filesystem.
    """

    def __init__(self, name='Wallet', coins=None, accounts=None, storage=None):
        self.name = name
        self.coins = coins or []  # type: List[BaseCoin]
        self.accounts = accounts or []  # type: List[Account]
        self.storage = storage or WalletStorage()

    def get_or_create_coin(self, ledger, coin_dict=None):  # type: (BaseLedger, Dict) -> BaseCoin
        for coin in self.coins:
            if coin.__class__ is ledger.coin_class:
                return coin
        coin = ledger.coin_class(ledger, **(coin_dict or {}))
        self.coins.append(coin)
        return coin

    def generate_account(self, ledger):  # type: (BaseLedger) -> Account
        coin = self.get_or_create_coin(ledger)
        account = Account.generate(coin, u'torba')
        self.accounts.append(account)
        return account

    @classmethod
    def from_storage(cls, storage, manager):  # type: (WalletStorage, 'WalletManager') -> Wallet
        json_dict = storage.read()

        coins = {}
        for coin_id, coin_dict in json_dict.get('coins', {}).items():
            coins[coin_id] = inflate_coin(manager, coin_id, coin_dict)

        accounts = []
        for account_dict in json_dict.get('accounts', []):
            coin_id = account_dict['coin']
            coin = coins.get(coin_id)
            if coin is None:
                coin = coins[coin_id] = inflate_coin(manager, coin_id, {})
            account = Account.from_dict(coin, account_dict)
            accounts.append(account)

        return cls(
            name=json_dict.get('name', 'Wallet'),
            coins=list(coins.values()),
            accounts=accounts,
            storage=storage
        )

    def to_dict(self):
        return {
            'name': self.name,
            'coins': {c.get_id(): c.to_dict() for c in self.coins},
            'accounts': [a.to_dict() for a in self.accounts]
        }

    def save(self):
        self.storage.write(self.to_dict())

    @property
    def default_account(self):
        for account in self.accounts:
            return account

    def get_account_private_key_for_address(self, address):
        for account in self.accounts:
            private_key = account.get_private_key_for_address(address)
            if private_key is not None:
                return account, private_key


class WalletStorage:

    LATEST_VERSION = 2

    DEFAULT = {
        'version': LATEST_VERSION,
        'name': 'Wallet',
        'coins': {},
        'accounts': []
    }

    def __init__(self, path=None, default=None):
        self.path = path
        self._default = default or self.DEFAULT.copy()

    @property
    def default(self):
        return self._default.copy()

    def read(self):
        if self.path and os.path.exists(self.path):
            with open(self.path, "r") as f:
                json_data = f.read()
                json_dict = json.loads(json_data)
                if json_dict.get('version') == self.LATEST_VERSION and \
                        set(json_dict) == set(self._default):
                    return json_dict
                else:
                    return self.upgrade(json_dict)
        else:
            return self.default

    @classmethod
    def upgrade(cls, json_dict):
        json_dict = json_dict.copy()

        def _rename_property(old, new):
            if old in json_dict:
                json_dict[new] = json_dict[old]
                del json_dict[old]

        version = json_dict.pop('version', -1)

        if version == 1:  # upgrade from version 1 to version 2
            _rename_property('addr_history', 'history')
            _rename_property('use_encryption', 'encrypted')
            _rename_property('gap_limit', 'gap_limit_for_receiving')

        # merge the upgraded fields over a fresh copy of the defaults,
        # without mutating the shared DEFAULT dict
        upgraded = cls.DEFAULT.copy()
        upgraded.update(json_dict)
        return upgraded

    def write(self, json_dict):
        json_data = json.dumps(json_dict, indent=4, sort_keys=True)
        if self.path is None:
            return json_data

        temp_path = "%s.tmp.%s" % (self.path, os.getpid())
        with open(temp_path, "w") as f:
            f.write(json_data)
            f.flush()
            os.fsync(f.fileno())

        if os.path.exists(self.path):
            mode = os.stat(self.path).st_mode
        else:
            mode = stat.S_IREAD | stat.S_IWRITE
        try:
            os.rename(temp_path, self.path)
        except:
            os.remove(self.path)
            os.rename(temp_path, self.path)
        os.chmod(self.path, mode)
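
A sketch of the in-memory upgrade path (editorial; it relies on WalletStorage.upgrade() merging the old fields over a copy of DEFAULT as written above):

    from torba.wallet import WalletStorage

    old = {'version': 1, 'use_encryption': True, 'gap_limit': 20}
    new = WalletStorage.upgrade(old)
    assert new['encrypted'] is True
    assert new['gap_limit_for_receiving'] == 20
    assert new['version'] == WalletStorage.LATEST_VERSION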

2048
torba/words/chinese_simplified.txt
Normal file
File diff suppressed because it is too large

2048
torba/words/english.txt
Normal file
File diff suppressed because it is too large

2048
torba/words/japanese.txt
Normal file
File diff suppressed because it is too large

1654
torba/words/portuguese.txt
Normal file
File diff suppressed because it is too large

2048
torba/words/spanish.txt
Normal file
File diff suppressed because it is too large

12
tox.ini
Normal file

@@ -0,0 +1,12 @@

[tox]
envlist = py{27,36}

[testenv]
deps =
    coverage
    mock

changedir = {toxinidir}/tests

commands =
    coverage run -p --source={envsitepackagesdir}/torba -m unittest discover -v