2017-02-21 04:00:54 +01:00
|
|
|
import glob
|
|
|
|
import json
|
|
|
|
import os
|
|
|
|
import platform
|
|
|
|
import subprocess
|
|
|
|
import sys
|
2017-02-21 22:13:41 +01:00
|
|
|
|
2017-02-21 04:00:54 +01:00
|
|
|
import github
|
|
|
|
import uritemplate
|
2017-05-24 20:45:57 +02:00
|
|
|
import boto3
|
2017-02-21 04:00:54 +01:00
|
|
|
|
2017-12-02 07:37:09 +01:00
|
|
|
# S3 bucket that holds both the archived builds and the auto-update feed.
S3_BUCKET = 'releases.lbry.io'

# Key prefix under which each tagged/timestamped build is archived.
RELEASES_S3_PATH = 'app'

# Key prefix polled by the auto-updater for the most recent build
# (served via update.lbry.io).
LATEST_S3_PATH = 'app/latest'
|
2017-02-21 04:00:54 +01:00
|
|
|
|
2017-03-03 04:43:34 +01:00
|
|
|
def main():
    # Publish the packaged app: attach it to the GitHub release for the
    # current git tag (skipped when HEAD is not exactly on a tag), then
    # archive it and the auto-update assets on S3.
    upload_to_github_if_tagged('lbryio/lbry-app')
    upload_to_s3(RELEASES_S3_PATH)
|
2017-05-24 20:45:57 +02:00
|
|
|
|
|
|
|
|
2017-12-02 07:37:09 +01:00
|
|
|
def get_asset_path():
    """Return the absolute path of the packaged installer for this platform.

    Looks in ../dist (relative to this script) for the LBRY artifact:
    .dmg on macOS, .deb on Linux, .exe on Windows.

    Raises:
        Exception: if the platform is unrecognized, or no matching
            artifact exists in ../dist.
    """
    this_dir = os.path.dirname(os.path.realpath(__file__))
    suffixes = {'Darwin': 'dmg', 'Linux': 'deb', 'Windows': 'exe'}
    system = platform.system()

    if system not in suffixes:
        raise Exception("I don't know about any artifact on {}".format(system))

    matches = glob.glob(this_dir + '/../dist/LBRY*.' + suffixes[system])
    if not matches:
        # The original indexed [0] unconditionally, which raised an opaque
        # IndexError when the build output was missing.
        raise Exception('No LBRY*.{} artifact found in {}/../dist'.format(
            suffixes[system], this_dir))
    return os.path.realpath(matches[0])
|
2017-11-19 06:15:18 +01:00
|
|
|
|
2017-12-02 07:37:09 +01:00
|
|
|
def get_update_asset_path():
    """Return the path of the asset consumed by the auto-updater.

    On macOS the updater wants a .zip; on every other platform it is
    simply the regular installer file.
    """
    if platform.system() != 'Darwin':
        return get_asset_path()
    script_dir = os.path.dirname(os.path.realpath(__file__))
    zips = glob.glob(script_dir + '/../dist/LBRY*.zip')
    return os.path.realpath(zips[0])
|
2017-05-24 20:45:57 +02:00
|
|
|
|
|
|
|
|
2017-12-02 07:37:09 +01:00
|
|
|
def get_latest_file_path():
    """Return the electron-builder update-metadata file from ../dist,
    or None when no such file was produced.

    The file is named latest.yml on Windows, latest-mac.yml on Mac and
    latest-linux.yml on Linux.
    """
    script_dir = os.path.dirname(os.path.realpath(__file__))
    candidates = glob.glob(script_dir + '/../dist/latest*.yml')
    return next(iter(candidates), None)
|
2017-05-24 20:45:57 +02:00
|
|
|
|
2017-12-02 07:37:09 +01:00
|
|
|
|
|
|
|
def upload_to_s3(folder):
|
|
|
|
asset_path = get_asset_path()
|
2017-05-24 20:45:57 +02:00
|
|
|
|
|
|
|
if 'AWS_ACCESS_KEY_ID' not in os.environ or 'AWS_SECRET_ACCESS_KEY' not in os.environ:
|
|
|
|
print 'Must set AWS_ACCESS_KEY_ID and AWS_SECRET_ACCESS_KEY to publish assets to s3'
|
|
|
|
return 1
|
|
|
|
|
2017-12-02 07:37:09 +01:00
|
|
|
asset_filename = os.path.basename(asset_path)
|
|
|
|
|
|
|
|
tag = subprocess.check_output(['git', 'describe', '--always', '--abbrev=8', 'HEAD']).strip()
|
|
|
|
commit_date = subprocess.check_output([
|
|
|
|
'git', 'show', '-s', '--format=%cd', '--date=format:%Y%m%d-%H%I%S', 'HEAD']).strip()
|
|
|
|
key = folder + '/' + commit_date + '-' + tag + '/' + asset_filename
|
|
|
|
|
|
|
|
print "Uploading asset file at " + asset_path + " to s3://" + S3_BUCKET + '/' + key
|
|
|
|
|
2017-05-24 20:45:57 +02:00
|
|
|
s3 = boto3.resource(
|
|
|
|
's3',
|
|
|
|
aws_access_key_id=os.environ['AWS_ACCESS_KEY_ID'],
|
|
|
|
aws_secret_access_key=os.environ['AWS_SECRET_ACCESS_KEY'],
|
|
|
|
config=boto3.session.Config(signature_version='s3v4')
|
|
|
|
)
|
2017-12-02 07:37:09 +01:00
|
|
|
|
|
|
|
s3.Object(S3_BUCKET, key).upload_file(asset_path)
|
|
|
|
|
|
|
|
# Populate the update bucket (update.lbry.io)
|
|
|
|
|
|
|
|
update_asset_path = get_update_asset_path()
|
|
|
|
if asset_path == update_asset_path:
|
|
|
|
# If the update asset and the regular built file are the same, we can
|
|
|
|
# just copy over.
|
|
|
|
print "Copying asset file to s3://" + S3_BUCKET + "/" + LATEST_S3_PATH + "/" + asset_filename
|
|
|
|
s3.Object(S3_BUCKET, LATEST_S3_PATH + "/" + asset_filename).copy_from(CopySource={
|
|
|
|
'Bucket': S3_BUCKET,
|
|
|
|
'Key': key
|
|
|
|
})
|
|
|
|
else:
|
|
|
|
update_asset_filename = os.path.basename(update_asset_path)
|
|
|
|
print "Uploading update asset file at", update_asset_path, \
|
|
|
|
"to s3://" + S3_BUCKET + "/" + LATEST_S3_PATH + "/" + update_asset_filename
|
|
|
|
s3.Object(S3_BUCKET, LATEST_S3_PATH + "/" + update_asset_filename).upload_file(update_asset_path)
|
|
|
|
|
|
|
|
metadatafilepath = get_latest_file_path()
|
|
|
|
|
2017-12-07 09:08:27 +01:00
|
|
|
if metadatafilepath is not None:
|
|
|
|
# For some reason latest-linux.yml isn't being created, but it's OK because updates don't
|
|
|
|
# work on Linux yet anyway.
|
|
|
|
metadatafilename = os.path.basename(metadatafilepath)
|
|
|
|
|
|
|
|
print "Uploading update metadata file at", metadatafilepath, "to S3"
|
|
|
|
s3.Object(S3_BUCKET, LATEST_S3_PATH + "/" + metadatafilename).upload_file(metadatafilepath)
|
2017-05-24 20:45:57 +02:00
|
|
|
|
|
|
|
|
|
|
|
def upload_to_github_if_tagged(repo_name):
    """If HEAD is exactly on a git tag, upload the built installer to the
    GitHub release for that tag in repo_name (e.g. 'lbryio/lbry-app').

    Returns 1 (without raising) when HEAD is untagged, GH_TOKEN is unset,
    or the tag does not exist in the remote repo.
    """
    try:
        # --exact-match makes git describe fail unless HEAD is on a tag.
        current_tag = subprocess.check_output(
            ['git', 'describe', '--exact-match', 'HEAD']).strip()
    except subprocess.CalledProcessError:
        print 'Not uploading to GitHub as we are not currently on a tag'
        return 1

    print "Current tag: " + current_tag

    if 'GH_TOKEN' not in os.environ:
        print 'Must set GH_TOKEN in order to publish assets to a release'
        return 1

    gh_token = os.environ['GH_TOKEN']
    auth = github.Github(gh_token)
    repo = auth.get_repo(repo_name)

    if not check_repo_has_tag(repo, current_tag):
        print 'Tag {} is not in repo {}'.format(current_tag, repo)
        # TODO: maybe this should be an error
        return 1

    asset_path = get_asset_path()
    print "Uploading " + asset_path + " to Github tag " + current_tag
    release = get_github_release(repo, current_tag)
    upload_asset_to_github(release, asset_path, gh_token)
|
2017-02-23 00:00:19 +01:00
|
|
|
|
2017-02-21 04:00:54 +01:00
|
|
|
|
|
|
|
def check_repo_has_tag(repo, target_tag):
    """Return True if target_tag appears in the first page of the repo's
    tags, False otherwise.

    NOTE(review): only page 0 is inspected; an old tag beyond the first
    page would be reported as missing — presumably fine for fresh tags.
    """
    first_page = repo.get_tags().get_page(0)
    return any(tag.name == target_tag for tag in first_page)
|
|
|
|
|
|
|
|
|
2017-05-24 20:45:57 +02:00
|
|
|
def get_github_release(repo, current_tag):
    """Return the release of repo whose tag_name equals current_tag.

    Raises:
        Exception: if no release carries that tag.
    """
    matching = (rel for rel in repo.get_releases() if rel.tag_name == current_tag)
    release = next(matching, None)
    if release is None:
        raise Exception('No release for {} was found'.format(current_tag))
    return release
|
2017-02-21 04:00:54 +01:00
|
|
|
|
|
|
|
|
2017-05-24 20:45:57 +02:00
|
|
|
def upload_asset_to_github(release, asset_to_upload, token):
|
2017-02-23 15:51:59 +01:00
|
|
|
basename = os.path.basename(asset_to_upload)
|
2017-05-24 20:45:57 +02:00
|
|
|
for asset in release.raw_data['assets']:
|
|
|
|
if asset['name'] == basename:
|
|
|
|
print 'File {} has already been uploaded to {}'.format(basename, release.tag_name)
|
|
|
|
return
|
|
|
|
|
|
|
|
upload_uri = uritemplate.expand(release.upload_url, {'name': basename})
|
2017-02-22 23:47:30 +01:00
|
|
|
count = 0
|
|
|
|
while count < 10:
|
|
|
|
try:
|
2017-05-24 20:45:57 +02:00
|
|
|
output = _curl_uploader(upload_uri, asset_to_upload, token)
|
|
|
|
if 'errors' in output:
|
|
|
|
raise Exception(output)
|
|
|
|
else:
|
|
|
|
print 'Successfully uploaded to {}'.format(output['browser_download_url'])
|
2017-02-22 23:47:30 +01:00
|
|
|
except Exception:
|
2017-04-11 16:38:32 +02:00
|
|
|
print 'Failed uploading on attempt {}'.format(count + 1)
|
2017-02-22 23:47:30 +01:00
|
|
|
count += 1
|
|
|
|
|
|
|
|
|
2017-02-23 15:51:59 +01:00
|
|
|
def _curl_uploader(upload_uri, asset_to_upload, token):
|
2017-02-21 04:00:54 +01:00
|
|
|
# using requests.post fails miserably with SSL EPIPE errors. I spent
|
|
|
|
# half a day trying to debug before deciding to switch to curl.
|
2017-02-21 22:13:41 +01:00
|
|
|
#
|
|
|
|
# TODO: actually set the content type
|
2017-04-11 16:38:32 +02:00
|
|
|
print 'Using curl to upload {} to {}'.format(asset_to_upload, upload_uri)
|
2017-02-21 04:00:54 +01:00
|
|
|
cmd = [
|
2017-02-22 22:24:19 +01:00
|
|
|
'curl',
|
|
|
|
'-sS',
|
|
|
|
'-X', 'POST',
|
|
|
|
'-u', ':{}'.format(os.environ['GH_TOKEN']),
|
2017-02-23 19:11:05 +01:00
|
|
|
'--header', 'Content-Type: application/octet-stream',
|
|
|
|
'--data-binary', '@-',
|
|
|
|
upload_uri
|
2017-02-21 04:00:54 +01:00
|
|
|
]
|
2017-03-03 04:43:34 +01:00
|
|
|
# '-d', '{"some_key": "some_value"}',
|
2017-02-22 22:11:52 +01:00
|
|
|
print 'Calling curl:'
|
|
|
|
print cmd
|
|
|
|
print
|
2017-02-23 19:11:05 +01:00
|
|
|
with open(asset_to_upload, 'rb') as fp:
|
|
|
|
p = subprocess.Popen(cmd, stdin=fp, stderr=subprocess.PIPE, stdout=subprocess.PIPE)
|
2017-02-22 20:33:37 +01:00
|
|
|
stdout, stderr = p.communicate()
|
|
|
|
print 'curl return code:', p.returncode
|
|
|
|
if stderr:
|
|
|
|
print 'stderr output from curl:'
|
|
|
|
print stderr
|
|
|
|
print 'stdout from curl:'
|
|
|
|
print stdout
|
2017-04-11 16:38:32 +02:00
|
|
|
return json.loads(stdout)
|
2017-02-21 04:00:54 +01:00
|
|
|
|
|
|
|
|
|
|
|
if __name__ == '__main__':
    # Exit with main()'s return value (1 on the various soft failures,
    # None/0 on success).
    sys.exit(main())
|