commit aa90257ec7
11 changed files with 32 additions and 36 deletions
@@ -83,8 +83,6 @@ class BlobExchangeClientProtocol(asyncio.Protocol):
         if len(data) > (self.blob.get_length() - self._blob_bytes_received):
             data = data[:(self.blob.get_length() - self._blob_bytes_received)]
             log.warning("got more than asked from %s:%d, probable sendfile bug", self.peer_address, self.peer_port)
-        else:
-            data = data
         self._blob_bytes_received += len(data)
         try:
             self.writer.write(data)
@@ -65,10 +65,10 @@ def remove_brackets(key):
 def set_kwargs(parsed_args):
     kwargs = {}
     for key, arg in parsed_args.items():
-        k = None
         if arg is None:
             continue
-        elif key.startswith("--") and remove_brackets(key[2:]) not in kwargs:
+        k = None
+        if key.startswith("--") and remove_brackets(key[2:]) not in kwargs:
             k = remove_brackets(key[2:])
         elif remove_brackets(key) not in kwargs:
             k = remove_brackets(key)
@@ -1,4 +1,5 @@
 import os
+import sys
 import logging
 
 log = logging.getLogger(__name__)
@@ -51,7 +52,6 @@ def migrate_db(conf, start, end):
 
 
 def run_migration_script():
-    import sys
     log_format = "(%(asctime)s)[%(filename)s:%(lineno)s] %(funcName)s(): %(message)s"
     logging.basicConfig(level=logging.DEBUG, format=log_format, filename="migrator.log")
     sys.stdout = open("migrator.out.log", 'w')
@@ -125,10 +125,9 @@ def do_migration(conf):
         name_metadata_cursor.execute("select txid, n, sd_hash from name_metadata").fetchall()
     }
 
-    sd_hash_to_stream_hash = {
-        sd_hash: stream_hash for (sd_hash, stream_hash) in
+    sd_hash_to_stream_hash = dict(
         lbryfile_cursor.execute("select sd_blob_hash, stream_hash from lbry_file_descriptors").fetchall()
-    }
+    )
 
     stream_hash_to_stream_blobs = {}
 
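Side note on the hunk above: dict() accepts any iterable of 2-tuples, so the comprehension that unpacked each fetched row only to re-pair it was redundant. A minimal sketch of the equivalence, using made-up rows in place of the real cursor result:

    # hypothetical stand-in for lbryfile_cursor.execute(...).fetchall()
    rows = [("sd_hash_1", "stream_hash_1"), ("sd_hash_2", "stream_hash_2")]
    assert dict(rows) == {sd_hash: stream_hash for (sd_hash, stream_hash) in rows}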
@@ -475,7 +475,6 @@ class SQLiteStorage(SQLiteMixin):
                     os.path.join(binascii.unhexlify(download_directory).decode(),
                                  binascii.unhexlify(file_name).decode())):
                 continue
-            else:
                 removed.append((stream_hash,))
         if removed:
             transaction.executemany(
@@ -23,7 +23,7 @@ def get_platform() -> dict:
         "build": build_type.BUILD,  # CI server sets this during build step
     }
     if p["os_system"] == "Linux":
-        import distro
+        import distro  # pylint: disable=import-outside-toplevel
         p["distro"] = distro.info()
         p["desktop"] = os.environ.get('XDG_CURRENT_DESKTOP', 'Unknown')
 
@@ -2,6 +2,7 @@ import time
 import struct
 import sqlite3
 import logging
+from operator import itemgetter
 from typing import Tuple, List, Dict, Union, Type, Optional
 from binascii import unhexlify
 from decimal import Decimal
@@ -336,7 +337,7 @@ def search(constraints) -> Tuple[List, List, int, int]:
     if 'order_by' not in constraints:
         constraints['order_by'] = ["claim_hash"]
     txo_rows = _search(**constraints)
-    channel_hashes = set(txo['channel_hash'] for txo in txo_rows if txo['channel_hash'])
+    channel_hashes = set(filter(None, map(itemgetter('channel_hash'), txo_rows)))
    extra_txo_rows = []
    if channel_hashes:
        extra_txo_rows = _search(
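The channel_hashes rewrite above swaps a generator expression for filter/map with operator.itemgetter (hence the new import added in the earlier hunk). Both forms drop falsy channel_hash values; a small sketch with made-up rows:

    from operator import itemgetter

    # hypothetical rows standing in for txo_rows
    txo_rows = [{'channel_hash': b'aa'}, {'channel_hash': None}, {'channel_hash': b'bb'}]
    old = set(txo['channel_hash'] for txo in txo_rows if txo['channel_hash'])
    new = set(filter(None, map(itemgetter('channel_hash'), txo_rows)))
    assert old == new == {b'aa', b'bb'}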
@@ -85,7 +85,7 @@ class AddressManager:
 class HierarchicalDeterministic(AddressManager):
     """ Implements simple version of Bitcoin Hierarchical Deterministic key management. """
 
-    name = "deterministic-chain"
+    name: str = "deterministic-chain"
 
     __slots__ = 'gap', 'maximum_uses_per_address'
 
@@ -164,7 +164,7 @@ class HierarchicalDeterministic(AddressManager):
 class SingleKey(AddressManager):
     """ Single Key address manager always returns the same address for all operations. """
 
-    name = "single-address"
+    name: str = "single-address"
 
     __slots__ = ()
 
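The name: str = ... changes in these two hunks (and the header_size: int = 80 style changes in the final hunk) only add PEP 526 annotations to existing class attributes; the values and runtime behaviour are unchanged, but type checkers now see an explicit type. A minimal sketch with a hypothetical class:

    class ExampleManager:
        name: str = "single-address"

    assert ExampleManager.name == "single-address"
    assert ExampleManager.__annotations__['name'] is str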
@@ -102,20 +102,20 @@ def constraints_to_sql(constraints, joiner=' AND ', prepend_key=''):
         if not key:
             sql.append(constraint)
             continue
-        elif key.startswith('$'):
+        if key.startswith('$'):
             values[key] = constraint
             continue
-        elif key.endswith('__not'):
+        if key.endswith('__not'):
             col, op = col[:-len('__not')], '!='
         elif key.endswith('__is_null'):
             col = col[:-len('__is_null')]
             sql.append(f'{col} IS NULL')
             continue
-        elif key.endswith('__is_not_null'):
+        if key.endswith('__is_not_null'):
             col = col[:-len('__is_not_null')]
             sql.append(f'{col} IS NOT NULL')
             continue
-        elif key.endswith('__lt'):
+        if key.endswith('__lt'):
             col, op = col[:-len('__lt')], '<'
         elif key.endswith('__lte'):
             col, op = col[:-len('__lte')], '<='
@@ -149,7 +149,7 @@ def constraints_to_sql(constraints, joiner=' AND ', prepend_key=''):
             sql.append(f'({where})')
             values.update(subvalues)
             continue
-        elif key.endswith('__and'):
+        if key.endswith('__and'):
             where, subvalues = constraints_to_sql(constraint, ' AND ', key+tag+'_')
             sql.append(f'({where})')
             values.update(subvalues)
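In the two hunks above, every elif that follows a branch ending in continue is flattened to a plain if; because the earlier branch never falls through, control flow is identical. (The elif key.endswith('__is_null') and __lte branches stay elif since their predecessors do fall through.) A sketch of the pattern with a hypothetical loop:

    def with_elif(items):
        out = []
        for x in items:
            if x is None:
                continue
            elif x < 0:
                out.append(0)
            else:
                out.append(x)
        return out

    def with_if(items):
        out = []
        for x in items:
            if x is None:
                continue
            if x < 0:   # the continue above already ended the None case
                out.append(0)
            else:
                out.append(x)
        return out

    assert with_elif([3, None, -1]) == with_if([3, None, -1]) == [3, 0]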
@@ -554,7 +554,7 @@ class BaseDatabase(SQLiteMixin):
                 row[1], height=row[2], position=row[3], is_verified=row[4]
             )
             txo = txs[row[0]].outputs[row[5]]
-            row_accounts = {k: v for k, v in (a.split('|') for a in row[8].split(','))}
+            row_accounts = dict(a.split('|') for a in row[8].split(','))
             account_match = set(row_accounts) & my_accounts
             if account_match:
                 txo.is_my_account = True
@@ -507,7 +507,6 @@ class BaseTransaction:
 
             if tx._outputs:
                 break
-            else:
                 # this condition and the outer range(5) loop cover an edge case
                 # whereby a single input is just enough to cover the fee and
                 # has some change left over, but the change left over is less
@@ -14,11 +14,11 @@ from torba.client.baseheader import BaseHeaders, ArithUint256
 
 
 class MainHeaders(BaseHeaders):
-    header_size = 80
-    chunk_size = 2016
-    max_target = 0x00000000ffffffffffffffffffffffffffffffffffffffffffffffffffffffff
+    header_size: int = 80
+    chunk_size: int = 2016
+    max_target: int = 0x00000000ffffffffffffffffffffffffffffffffffffffffffffffffffffffff
     genesis_hash: Optional[bytes] = b'000000000019d6689c085ae165831e934ff763ae46a2a6c172b3f1b60a8ce26f'
-    target_timespan = 14 * 24 * 60 * 60
+    target_timespan: int = 14 * 24 * 60 * 60
 
     @staticmethod
     def serialize(header: dict) -> bytes: