Merge branch 'fix-windows-signing'

* fix-windows-signing:
  pylint type checking
  pointless strings
  pylint
  try decrypting cert
  re-enable secure vars
This commit is contained in:
Alex Grintsvayg 2017-04-25 14:37:04 -04:00
commit 06fab24437
11 changed files with 40 additions and 42 deletions

View file

@@ -3,12 +3,12 @@ version: 1.0.{build}
environment:
GH_TOKEN:
secure: LiI5jyuHUw6XbH4kC3gP1HX4P/v4rwD/gCNtaFhQu2AvJz1/1wALkp5ECnIxRySN
# key_pass:
# secure: u6DydPcdrUJlxGL9uc7yQRYG8+5rY6aAEE9nfCSzFyNzZlX9NniOp8Uh5ZKQqX7bGEngLI6ipbLfiJvn0XFnhbn2iTkOuMqOXVJVOehvwlQ=
# pfx_key:
# secure: 1mwqyRy7hDqDjDK+TIAoaXyXzpNgwruFNA6TPkinUcVM7A+NLD33RQLnfnwVy+R5ovD2pUfhQ6+N0Fqebv6tZh436LIEsock+6IOdpgFwrg=
key_pass:
secure: u6DydPcdrUJlxGL9uc7yQRYG8+5rY6aAEE9nfCSzFyNzZlX9NniOp8Uh5ZKQqX7bGEngLI6ipbLfiJvn0XFnhbn2iTkOuMqOXVJVOehvwlQ=
pfx_key:
secure: 1mwqyRy7hDqDjDK+TIAoaXyXzpNgwruFNA6TPkinUcVM7A+NLD33RQLnfnwVy+R5ovD2pUfhQ6+N0Fqebv6tZh436LIEsock+6IOdpgFwrg=
#
#notifications:
# - provider: Slack
# incoming_webhook:

View file

@@ -92,14 +92,12 @@ disable=
no-member,
no-self-use,
not-context-manager,
pointless-string-statement,
protected-access,
redefined-builtin,
redefined-outer-name,
redefined-variable-type,
relative-import,
signature-differs,
singleton-comparison,
super-init-not-called,
too-few-public-methods,
too-many-arguments,
@@ -114,7 +112,6 @@ disable=
trailing-newlines,
undefined-loop-variable,
ungrouped-imports,
unidiomatic-typecheck,
unnecessary-lambda,
unused-argument,
unused-variable,

View file

@@ -19,11 +19,16 @@ Get-Content ..\requirements.txt | Select-String -Pattern 'gmpy|miniupnpc' -NotMa
Add-Content requirements.txt "./gmpy-1.17-cp27-none-win32.whl"
pip install -r requirements.txt
pip install ..\.
python set_build.py
pyinstaller -y daemon.onefile.spec
pyinstaller -y cli.onefile.spec
nuget install secure-file -ExcludeVersion
secure-file\tools\secure-file -decrypt .\lbry2.pfx.enc -secret "$env:pfx_key"
signtool.exe sign /f .\lbry2.pfx /p "$env:key_pass" /tr http://tsa.starfieldtech.com /td SHA256 /fd SHA256 dist\*.exe
python zip_daemon.py
python release_on_tag.py

BIN
build/lbry2.pfx.enc Normal file

Binary file not shown.

View file

@@ -51,7 +51,7 @@ class ClaimOutpoint(dict):
return (self['txid'], self['nout']) == (compare['txid'], compare['nOut'])
elif 'nout' in compare:
return (self['txid'], self['nout']) == (compare['txid'], compare['nout'])
elif type(compare) in [str, unicode]:
elif isinstance(compare, (str, unicode)):
return compare == self.__repr__()
else:
raise TypeError('cannot compare {}'.format(type(compare)))

View file

@@ -241,7 +241,7 @@ class RequestHelper(object):
def _handle_incoming_blob(response_dict, peer, request):
if request.response_identifier not in response_dict:
return InvalidResponseError("response identifier not in response")
if type(response_dict[request.response_identifier]) != dict:
if not isinstance(response_dict[request.response_identifier], dict):
return InvalidResponseError("response not a dict. got %s" %
type(response_dict[request.response_identifier]))
response = response_dict[request.response_identifier]

View file

@@ -63,16 +63,16 @@ class Bencode(Encoding):
@return: The encoded data
@rtype: str
"""
if type(data) in (int, long):
if isinstance(data, (int, long)):
return 'i%de' % data
elif type(data) == str:
elif isinstance(data, str):
return '%d:%s' % (len(data), data)
elif type(data) in (list, tuple):
elif isinstance(data, (list, tuple)):
encodedListItems = ''
for item in data:
encodedListItems += self.encode(item)
return 'l%se' % encodedListItems
elif type(data) == dict:
elif isinstance(data, dict):
encodedDictItems = ''
keys = data.keys()
keys.sort()
@@ -80,16 +80,16 @@ class Bencode(Encoding):
encodedDictItems += self.encode(key)
encodedDictItems += self.encode(data[key])
return 'd%se' % encodedDictItems
elif type(data) == float:
elif isinstance(data, float):
# This (float data type) is a non-standard extension to the original Bencode algorithm
return 'f%fe' % data
elif data == None:
elif data is None:
# This (None/NULL data type) is a non-standard extension
# to the original Bencode algorithm
return 'n'
else:
print data
raise TypeError, "Cannot bencode '%s' object" % type(data)
raise TypeError("Cannot bencode '%s' object" % type(data))
def decode(self, data):
""" Decoder implementation of the Bencode algorithm
@@ -104,11 +104,11 @@ class Bencode(Encoding):
@rtype: int, list, dict or str
"""
if len(data) == 0:
raise DecodeError, 'Cannot decode empty string'
raise DecodeError('Cannot decode empty string')
try:
return self._decodeRecursive(data)[0]
except ValueError as e:
raise DecodeError, e.message
raise DecodeError(e.message)
@staticmethod
def _decodeRecursive(data, startIndex=0):
@@ -118,14 +118,14 @@
"""
if data[startIndex] == 'i':
endPos = data[startIndex:].find('e') + startIndex
return (int(data[startIndex + 1:endPos]), endPos + 1)
return int(data[startIndex + 1:endPos]), endPos + 1
elif data[startIndex] == 'l':
startIndex += 1
decodedList = []
while data[startIndex] != 'e':
listData, startIndex = Bencode._decodeRecursive(data, startIndex)
decodedList.append(listData)
return (decodedList, startIndex + 1)
return decodedList, startIndex + 1
elif data[startIndex] == 'd':
startIndex += 1
decodedDict = {}
@@ -133,15 +133,15 @@
key, startIndex = Bencode._decodeRecursive(data, startIndex)
value, startIndex = Bencode._decodeRecursive(data, startIndex)
decodedDict[key] = value
return (decodedDict, startIndex)
return decodedDict, startIndex
elif data[startIndex] == 'f':
# This (float data type) is a non-standard extension to the original Bencode algorithm
endPos = data[startIndex:].find('e') + startIndex
return (float(data[startIndex + 1:endPos]), endPos + 1)
return float(data[startIndex + 1:endPos]), endPos + 1
elif data[startIndex] == 'n':
# This (None/NULL data type) is a non-standard extension
# to the original Bencode algorithm
return (None, startIndex + 1)
return None, startIndex + 1
else:
splitPos = data[startIndex:].find(':') + startIndex
try:
@@ -151,4 +151,4 @@ class Bencode(Encoding):
startIndex = splitPos + 1
endPos = startIndex + length
bytes = data[startIndex:endPos]
return (bytes, endPos)
return bytes, endPos

View file

@@ -22,7 +22,7 @@ class RequestMessage(Message):
""" Message containing an RPC request """
def __init__(self, nodeID, method, methodArgs, rpcID=None):
if rpcID == None:
if rpcID is None:
rpcID = generate_id()
Message.__init__(self, rpcID, nodeID)
self.request = method

View file

@@ -93,13 +93,13 @@ class Node(object):
self.next_refresh_call = None
self.next_change_token_call = None
# Create k-buckets (for storing contacts)
if routingTableClass == None:
if routingTableClass is None:
self._routingTable = routingtable.OptimizedTreeRoutingTable(self.id)
else:
self._routingTable = routingTableClass(self.id)
# Initialize this node's network access mechanisms
if networkProtocol == None:
if networkProtocol is None:
self._protocol = protocol.KademliaProtocol(self)
else:
self._protocol = networkProtocol
@@ -107,7 +107,7 @@ class Node(object):
self.token_secret = self._generateID()
self.old_token_secret = None
self.change_token()
if dataStore == None:
if dataStore is None:
self._dataStore = datastore.DictDataStore()
else:
self._dataStore = dataStore
@@ -207,7 +207,7 @@ class Node(object):
def expand_and_filter(result):
expanded_peers = []
if type(result) == dict:
if isinstance(result, dict):
if blob_hash in result:
for peer in result[blob_hash]:
if self.lbryid != peer[6:]:
@@ -353,7 +353,7 @@ class Node(object):
outerDf = defer.Deferred()
def checkResult(result):
if type(result) == dict:
if isinstance(result, dict):
# We have found the value; now see who was the closest contact without it...
# ...and store the key/value pair
outerDf.callback(result)
@@ -454,7 +454,7 @@ class Node(object):
to fix this (perhaps use a stream from the Protocol class?)
"""
# Get the sender's ID (if any)
if originalPublisherID == None:
if originalPublisherID is None:
if '_rpcNodeID' in kwargs:
originalPublisherID = kwargs['_rpcNodeID']
else:
@@ -471,7 +471,7 @@ class Node(object):
# raise TypeError, 'No contact info available'
if ((self_store is False) and
(not 'token' in value or not self.verify_token(value['token'], compact_ip))):
('token' not in value or not self.verify_token(value['token'], compact_ip))):
raise ValueError('Invalid or missing token')
if 'port' in value:
@@ -584,7 +584,7 @@ class Node(object):
"""
findValue = rpc != 'findNode'
if startupShortlist == None:
if startupShortlist is None:
shortlist = self._routingTable.findCloseNodes(key, constants.alpha)
if key != self.id:
# Update the "last accessed" timestamp for the appropriate k-bucket
@@ -776,7 +776,7 @@ class _IterativeFindHelper(object):
if self.key in self.find_value_result:
self.outer_d.callback(self.find_value_result)
return
elif len(self.active_contacts) and self.find_value == False:
elif len(self.active_contacts) and self.find_value is False:
if self._is_all_done():
# TODO: Re-send the FIND_NODEs to all of the k closest nodes not already queried
#

View file

@@ -20,11 +20,9 @@ class Publisher(object):
self.certificate_id = certificate_id
self.lbry_file = None
"""
Create lbry file and make claim
"""
@defer.inlineCallbacks
def create_and_publish_stream(self, name, bid, claim_dict, file_path):
"""Create lbry file and make claim"""
log.info('Starting publish for %s', name)
file_name = os.path.basename(file_path)
with file_utils.get_read_handle(file_path) as read_handle:
@@ -47,11 +45,9 @@ class Publisher(object):
yield self.lbry_file.save_status()
defer.returnValue(claim_out)
"""
Make a claim without creating a lbry file
"""
@defer.inlineCallbacks
def publish_stream(self, name, bid, claim_dict):
"""Make a claim without creating a lbry file"""
claim_out = yield self.make_claim(name, bid, claim_dict)
defer.returnValue(claim_out)

View file

@@ -24,7 +24,7 @@ __version__ = '0.3.0'
def undecorated(o):
"""Remove all decorators from a function, method or class"""
# class decorator
if type(o) is type:
if isinstance(o, type):
return o
try: