chore: clean-up unused files
parent 234d815dcc
commit 2999ad4f6e
12 changed files with 0 additions and 803 deletions
@@ -1,27 +0,0 @@
# Test against the latest version of this Node.js version
environment:
  nodejs_version: 7
  GH_TOKEN:
    secure: LiI5jyuHUw6XbH4kC3gP1HX4P/v4rwD/gCNtaFhQu2AvJz1/1wALkp5ECnIxRySN
  pfx_key:
    secure: 1mwqyRy7hDqDjDK+TIAoaXyXzpNgwruFNA6TPkinUcVM7A+NLD33RQLnfnwVy+R5ovD2pUfhQ6+N0Fqebv6tZh436LIEsock+6IOdpgFwrg=
  AWS_ACCESS_KEY_ID:
    secure: iVGwoJ7ogspjSmuqr+haVPLglSgQsp6tUZx6mIlKH7Q=
  AWS_SECRET_ACCESS_KEY:
    secure: zKaqdZGPl0exDL5YhJkb33prSemC9Rzg9S7Lw2wFy1WnJ6ffgl6mQH7jqJDUTqsY
  CSC_LINK: build\lbry3.pfx
  CSC_KEY_PASSWORD:
    secure: u6DydPcdrUJlxGL9uc7yQRYG8+5rY6aAEE9nfCSzFyNzZlX9NniOp8Uh5ZKQqX7bGEngLI6ipbLfiJvn0XFnhbn2iTkOuMqOXVJVOehvwlQ=

skip_branch_with_pr: false

clone_folder: C:\projects\lbry-app

build_script:
  - ps: build\build.ps1

test: off

artifacts:
  - path: dist\*.exe
    name: LBRY
@@ -1,22 +0,0 @@
[bumpversion]
current_version = 0.20.0
commit = True
tag = True
parse = (?P<major>\d+)\.(?P<minor>\d+)\.(?P<patch>\d+)(-(?P<release>[a-z]+)(?P<candidate>\d+))?
serialize =
    {major}.{minor}.{patch}-{release}{candidate}
    {major}.{minor}.{patch}

[bumpversion:part:candidate]
first_value = 1

[bumpversion:part:release]
optional_value = production
values =
    rc
    production

[bumpversion:file:package.json]
search = "version": "{current_version}"
replace = "version": "{new_version}"
@@ -1,6 +0,0 @@
# Add a CHANGELOG entry for app changes
has_app_changes = !(git.modified_files.grep(/js/).empty? && git.modified_files.grep(/scss/).empty?)
if !git.modified_files.include?("CHANGELOG.md") && has_app_changes
  fail("Please include a CHANGELOG entry.")
  message "See http://keepachangelog.com/en/0.3.0/ for details on good changelog guidelines"
end
build.sh
@@ -1,4 +0,0 @@
#!/bin/bash
# this is here because teamcity runs /build.sh to build the project
DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )"
"$DIR/build/build.sh"
@@ -1,32 +0,0 @@
pip install -r build\requirements.txt

# Decrypt cert
nuget install secure-file -ExcludeVersion
secure-file\tools\secure-file -decrypt build\lbry3.pfx.enc -secret "$env:pfx_key"

# Get the latest stable version of Node.js or io.js
Install-Product node $env:nodejs_version
npm install -g yarn
yarn install

# clean dist\
if (Test-Path -Path dist\) {
    Remove-Item -Recurse -Force dist\
}
New-Item -ItemType directory -Path dist\

# get daemon and cli executable
$package_settings = (Get-Content package.json -Raw | ConvertFrom-Json).lbrySettings
$daemon_ver = $package_settings.lbrynetDaemonVersion
$daemon_url_template = $package_settings.lbrynetDaemonUrlTemplate
$daemon_url = $daemon_url_template.Replace('OSNAME', 'windows').Replace('DAEMONVER', $daemon_ver)
Invoke-WebRequest -Uri $daemon_url -OutFile daemon.zip
Expand-Archive daemon.zip -DestinationPath static\daemon\
dir static\daemon\ # verify that daemon binary is there
rm daemon.zip

# build electron app
yarn build
dir dist # verify that binary was built/named correctly

python build\upload_assets.py
@@ -1,59 +0,0 @@
#!/bin/bash

set -euo pipefail

ROOT="$( cd "$( dirname "${BASH_SOURCE[0]}" )/.." && pwd )"
cd "$ROOT"
BUILD_DIR="$ROOT/build"

LINUX=false
OSX=false
if [ "$(uname)" == "Darwin" ]; then
  echo -e "\033[0;32mBuilding for OSX\x1b[m"
  OSX=true
elif [ "$(expr substr $(uname -s) 1 5)" == "Linux" ]; then
  echo -e "\033[0;32mBuilding for Linux\x1b[m"
  LINUX=true
else
  echo -e "\033[1;31mPlatform detection failed\x1b[m"
  exit 1
fi

if $OSX; then
  ICON="$BUILD_DIR/icon.icns"
else
  ICON="$BUILD_DIR/icons/48x48.png"
fi

FULL_BUILD="${FULL_BUILD:-false}"
if [ -n "${TEAMCITY_VERSION:-}" -o -n "${APPVEYOR:-}" ]; then
  FULL_BUILD="true"
fi

DEPS="${DEPS:-$FULL_BUILD}"
if [ "$DEPS" != "true" ]; then
  echo -e "\033[1;36mDependencies will NOT be installed. Run with \"INSTALL_DEPENDENCIES=true\" to install dependencies, or \"FULL_BUILD=true\" to install dependencies and build a complete app.\x1b[m"
else
  # install dependencies
  echo -e "\033[0;32mInstalling Dependencies\x1b[m"
  "$BUILD_DIR/install_deps.sh"
fi

[ -d "$ROOT/dist" ] && rm -rf "$ROOT/dist"

yarn install

###################
#  Build the app  #
###################
if [ "$FULL_BUILD" == "true" ]; then
  if $OSX; then
    security unlock-keychain -p ${KEYCHAIN_PASSWORD} osx-build.keychain
  fi

  yarn build

  echo -e '\033[0;32mBuild and packaging complete.\x1b[m'
else
  echo -e 'Build complete. Run \033[1;31myarn dev\x1b[m to launch the app'
fi
@@ -1,129 +0,0 @@
import datetime
import re

CHANGELOG_START_RE = re.compile(r'^\#\# \[Unreleased\]')
CHANGELOG_END_RE = re.compile(r'^\#\# \[.*\] - \d{4}-\d{2}-\d{2}')
# if we come across a section header between two release section headers
# then we probably have an improperly formatted changelog
CHANGELOG_ERROR_RE = re.compile(r'^\#\# ')
SECTION_RE = re.compile(r'^\#\#\# (.*)$')
EMPTY_RE = re.compile(r'^\w*\*\w*$')
ENTRY_RE = re.compile(r'\* (.*)')
VALID_SECTIONS = ['Added', 'Changed', 'Deprecated', 'Removed', 'Fixed', 'Security']

# allocate some entries to cut-down on merge conflicts
TEMPLATE = """### Added
 *
 *

### Changed
 *
 *

### Fixed
 *
 *

### Deprecated
 *
 *

### Removed
 *
 *

"""


class Changelog(object):
    def __init__(self, path):
        self.path = path
        self.start = []
        self.unreleased = []
        self.rest = []
        self._parse()

    def _parse(self):
        with open(self.path) as fp:
            lines = fp.readlines()

        unreleased_start_found = False
        unreleased_end_found = False

        for line in lines:
            if not unreleased_start_found:
                self.start.append(line)
                if CHANGELOG_START_RE.search(line):
                    unreleased_start_found = True
                continue
            if unreleased_end_found:
                self.rest.append(line)
                continue
            if CHANGELOG_END_RE.search(line):
                self.rest.append(line)
                unreleased_end_found = True
                continue
            if CHANGELOG_ERROR_RE.search(line):
                raise Exception(
                    'Failed to parse {}: {}'.format(self.path, 'unexpected section header found'))
            self.unreleased.append(line)

        self.unreleased = self._normalize_section(self.unreleased)

    @staticmethod
    def _normalize_section(lines):
        """Parse a changelog entry and output a normalized form"""
        sections = {}
        current_section_name = None
        current_section_contents = []
        for line in lines:
            line = line.strip()
            if not line or EMPTY_RE.match(line):
                continue
            match = SECTION_RE.match(line)
            if match:
                if current_section_contents:
                    sections[current_section_name] = current_section_contents
                    current_section_contents = []
                current_section_name = match.group(1)
                if current_section_name not in VALID_SECTIONS:
                    raise ValueError("Section '{}' is not valid".format(current_section_name))
                continue
            match = ENTRY_RE.match(line)
            if match:
                current_section_contents.append(match.group(1))
                continue
            raise Exception('Something is wrong with line: {}'.format(line))
        if current_section_contents:
            sections[current_section_name] = current_section_contents

        output = []
        for section in VALID_SECTIONS:
            if section not in sections:
                continue
            output.append('### {}'.format(section))
            for entry in sections[section]:
                output.append(' * {}'.format(entry))
            output.append("\n")
        return output

    def get_unreleased(self):
        return '\n'.join(self.unreleased) if self.unreleased else None

    def bump(self, version):
        if not self.unreleased:
            return

        today = datetime.datetime.today()
        header = "## [{}] - {}\n\n".format(version, today.strftime('%Y-%m-%d'))

        changelog_data = (
            ''.join(self.start) +
            TEMPLATE +
            header +
            '\n'.join(self.unreleased) + '\n\n'
            + ''.join(self.rest)
        )

        with open(self.path, 'w') as fp:
            fp.write(changelog_data)
@@ -1,122 +0,0 @@
#!/bin/bash

set -euo pipefail

LINUX=false
OSX=false

if [ "$(uname)" == "Darwin" ]; then
  OSX=true
elif [ "$(expr substr $(uname -s) 1 5)" == "Linux" ]; then
  LINUX=true
else
  echo "Platform detection failed"
  exit 1
fi


SUDO=''
if (( $EUID != 0 )); then
  SUDO='sudo'
fi

cmd_exists() {
  command -v "$1" >/dev/null 2>&1
  return $?
}

set +eu
GITUSERNAME=$(git config --global --get user.name)
if [ -z "$GITUSERNAME" ]; then
  git config --global user.name "$(whoami)"
fi
GITEMAIL=$(git config --global --get user.email)
if [ -z "$GITEMAIL" ]; then
  git config --global user.email "$(whoami)@lbry.io"
fi
set -eu


if $LINUX; then
  INSTALL="$SUDO apt-get install --no-install-recommends -y"
  $INSTALL build-essential libssl-dev libffi-dev libgmp3-dev python2.7-dev libsecret-1-dev curl
elif $OSX; then
  if ! cmd_exists brew; then
    /usr/bin/ruby -e "$(curl -fsSL https://raw.githubusercontent.com/Homebrew/install/master/install)"
  else
    brew update
  fi
fi


if ! cmd_exists python; then
  if $LINUX; then
    $INSTALL python2.7
  elif $OSX; then
    brew install python
  else
    echo "python2.7 required"
    exit 1
  fi
fi

PYTHON_VERSION=$(python -c 'import sys; print(".".join(map(str, sys.version_info[:2])))')
if [ "$PYTHON_VERSION" != "2.7" ]; then
  echo "Python 2.7 required"
  exit 1
fi

if ! cmd_exists pip; then
  if $LINUX; then
    $INSTALL python-pip
  elif $OSX; then
    $SUDO easy_install pip
  else
    echo "pip required"
    exit 1
  fi
  $SUDO pip install --upgrade pip
fi

if $LINUX && [ "$(pip list --format=columns | grep setuptools | wc -l)" -ge 1 ]; then
  $SUDO pip install setuptools
fi

if ! cmd_exists virtualenv; then
  $SUDO pip install virtualenv
fi

if ! cmd_exists node; then
  if $LINUX; then
    curl -sL https://deb.nodesource.com/setup_8.x | $SUDO -E bash -
    $INSTALL nodejs
  elif $OSX; then
    brew install node
  else
    echo "node required"
    exit 1
  fi
fi

if ! cmd_exists yarn; then
  if $LINUX; then
    curl -sS https://dl.yarnpkg.com/debian/pubkey.gpg | $SUDO apt-key add -
    echo "deb https://dl.yarnpkg.com/debian/ stable main" | $SUDO tee /etc/apt/sources.list.d/yarn.list
    $SUDO apt-get update
    $SUDO apt-get -o Dpkg::Options::="--force-overwrite" install yarn
  elif $OSX; then
    brew install yarn
  else
    echo "yarn required"
    exit 1
  fi
fi

if ! cmd_exists unzip; then
  if $LINUX; then
    $INSTALL unzip
  else
    echo "unzip required"
    exit 1
  fi
fi
Binary file not shown.
build/release.py
@@ -1,250 +0,0 @@
"""Bump version and create Github release

This script should be run locally, not on a build server.
"""
import argparse
import contextlib
import os
import json
import re
import requests
import subprocess
import sys

import git
import github

import changelog

ROOT = os.path.dirname(os.path.dirname(os.path.realpath(__file__)))
APP_PACKAGE_JSON_FILE = os.path.join(ROOT, 'package.json')


def main():
    bumpversion_parts = get_bumpversion_parts()

    parser = argparse.ArgumentParser()
    parser.add_argument("part", choices=bumpversion_parts, help="part of version to bump")
    parser.add_argument("--skip-sanity-checks", action="store_true")
    parser.add_argument("--skip-push", action="store_true")
    parser.add_argument("--dry-run", action="store_true")
    parser.add_argument("--confirm", action="store_true")
    args = parser.parse_args()

    if args.dry_run:
        print "DRY RUN. Nothing will be committed/pushed."

    repo = Repo('lbry-app', args.part, ROOT)
    branch = 'master'

    print 'Current version: {}'.format(repo.current_version)
    print 'New version: {}'.format(repo.new_version)
    with open(APP_PACKAGE_JSON_FILE, 'r') as f:
        package_settings = json.load(f)['lbrySettings']
    daemon_url_template = package_settings['lbrynetDaemonUrlTemplate']
    daemon_version = package_settings['lbrynetDaemonVersion']
    print 'Daemon version: {} ({})'.format(daemon_version, daemon_url_template.replace('DAEMONVER', daemon_version))

    if not args.confirm and not confirm():
        print "Aborting"
        return 1

    if not args.skip_sanity_checks:
        run_sanity_checks(repo, branch)
    repo.assert_new_tag_is_absent()

    is_rc = re.search('-rc\d+$', repo.new_version) is not None
    # only have a release message for real releases, not for RCs
    release_msg = '' if is_rc else repo.get_unreleased_changelog()

    if args.dry_run:
        print "rc: " + ("yes" if is_rc else "no")
        print "release message: \n" + (release_msg or " NO MESSAGE FOR RCs")
        return

    gh_token = get_gh_token()
    auth = github.Github(gh_token)
    github_repo = auth.get_repo('lbryio/lbry-app')

    if not is_rc:
        repo.bump_changelog()
    repo.bumpversion()

    new_tag = repo.get_new_tag()
    github_repo.create_git_release(new_tag, new_tag, release_msg, draft=True, prerelease=is_rc)

    if args.skip_push:
        print (
            'Skipping push; you will have to reset and delete tags if '
            'you want to run this script again.'
        )
    else:
        repo.git_repo.git.push(follow_tags=True, recurse_submodules='check')


class Repo(object):
    def __init__(self, name, part, directory):
        self.name = name
        self.part = part
        if not self.part:
            raise Exception('Part required')
        self.directory = directory
        self.git_repo = git.Repo(self.directory)
        self._bumped = False

        self.current_version = self._get_current_version()
        self.new_version = self._get_new_version()
        self._changelog = changelog.Changelog(os.path.join(self.directory, 'CHANGELOG.md'))

    def get_new_tag(self):
        return 'v' + self.new_version

    def get_unreleased_changelog(self):
        return self._changelog.get_unreleased()

    def bump_changelog(self):
        self._changelog.bump(self.new_version)
        with pushd(self.directory):
            self.git_repo.git.add(os.path.basename(self._changelog.path))

    def _get_current_version(self):
        with pushd(self.directory):
            output = subprocess.check_output(
                ['bumpversion', '--dry-run', '--list', '--allow-dirty', self.part])
            return re.search('^current_version=(.*)$', output, re.M).group(1)

    def _get_new_version(self):
        with pushd(self.directory):
            output = subprocess.check_output(
                ['bumpversion', '--dry-run', '--list', '--allow-dirty', self.part])
            return re.search('^new_version=(.*)$', output, re.M).group(1)

    def bumpversion(self):
        if self._bumped:
            raise Exception('Cowardly refusing to bump a repo twice')
        with pushd(self.directory):
            subprocess.check_call(['bumpversion', '--allow-dirty', self.part])
            self._bumped = True

    def assert_new_tag_is_absent(self):
        new_tag = self.get_new_tag()
        tags = self.git_repo.git.tag()
        if new_tag in tags.split('\n'):
            raise Exception('Tag {} is already present in repo {}.'.format(new_tag, self.name))

    def is_behind(self, branch):
        self.git_repo.remotes.origin.fetch()
        rev_list = '{branch}...origin/{branch}'.format(branch=branch)
        commits_behind = self.git_repo.git.rev_list(rev_list, right_only=True, count=True)
        commits_behind = int(commits_behind)
        return commits_behind > 0


def get_bumpversion_parts():
    with pushd(ROOT):
        output = subprocess.check_output([
            'bumpversion', '--dry-run', '--list', '--allow-dirty', 'fake-part',
        ])
        parse_line = re.search('^parse=(.*)$', output, re.M).group(1)
        return tuple(re.findall('<([^>]+)>', parse_line))


def get_gh_token():
    if 'GH_TOKEN' in os.environ:
        return os.environ['GH_TOKEN']
    else:
        print """
Please enter your personal access token. If you don't have one
See https://github.com/lbryio/lbry-app/wiki/Release-Script#generate-a-personal-access-token
for instructions on how to generate one.

You can also set the GH_TOKEN environment variable to avoid seeing this message
in the future"""
        return raw_input('token: ').strip()


def confirm():
    try:
        return raw_input('Is this what you want? [y/N] ').strip().lower() == 'y'
    except KeyboardInterrupt:
        return False


def run_sanity_checks(repo, branch):
    if repo.git_repo.is_dirty():
        print 'Cowardly refusing to release a dirty repo'
        sys.exit(1)
    if repo.git_repo.active_branch.name != branch:
        print 'Cowardly refusing to release when not on the {} branch'.format(branch)
        sys.exit(1)
    if repo.is_behind(branch):
        print 'Cowardly refusing to release when behind origin'
        sys.exit(1)
    if not is_custom_bumpversion_version():
        print (
            'Install LBRY\'s fork of bumpversion: '
            'pip install -U git+https://github.com/lbryio/bumpversion.git'
        )
        sys.exit(1)
    if not check_daemon_urls():
        sys.exit(1)


def check_daemon_urls():
    with open(APP_PACKAGE_JSON_FILE, 'r') as f:
        package_settings = json.load(f)['lbrySettings']

    daemon_url_template = package_settings['lbrynetDaemonUrlTemplate']
    daemon_version = package_settings['lbrynetDaemonVersion']

    if "OSNAME" not in daemon_url_template:
        print "Daemon URL must include the string \"OSNAME\""
        return False
    elif "DAEMONVER" not in daemon_url_template:
        print "Daemon URL must include the string \"DAEMONVER\""
        return False

    for osname in ('linux', 'macos', 'windows'):
        if not check_url(daemon_url_template.replace('DAEMONVER', daemon_version).replace('OSNAME', osname)):
            print "Daemon URL for", osname, " does not work"
            return False

    return True


def check_url(url):
    url = url.strip()
    r = requests.head(url)
    if r.status_code >= 400:
        return False
    elif r.status_code >= 300:
        new_location = r.headers.get('Location').strip()
        if new_location == url:
            # self-loop
            return False
        if "amazonaws.com" in new_location:
            # HEAD doesnt work on s3 links, so assume its good
            return True
        return check_url(new_location)
    return True


def is_custom_bumpversion_version():
    try:
        output = subprocess.check_output(['bumpversion', '-v'], stderr=subprocess.STDOUT).strip()
        if output == 'bumpversion 0.5.4-lbry':
            return True
    except (subprocess.CalledProcessError, OSError):
        pass
    return False


@contextlib.contextmanager
def pushd(new_dir):
    previous_dir = os.getcwd()
    os.chdir(new_dir)
    yield
    os.chdir(previous_dir)


if __name__ == '__main__':
    sys.exit(main())
@@ -1,6 +0,0 @@
GitPython==2.1.1
PyGithub==1.32
requests[security]==2.13.0
uritemplate==3.0.0
git+https://github.com/lbryio/bumpversion.git
boto3==1.4.4
@@ -1,146 +0,0 @@
import glob
import json
import os
import platform
import subprocess
import sys

import github
import uritemplate
import boto3


def main():
    upload_to_github_if_tagged('lbryio/lbry-app')


def get_asset_path():
    this_dir = os.path.dirname(os.path.realpath(__file__))
    system = platform.system()
    if system == 'Darwin':
        suffix = 'dmg'
    elif system == 'Linux':
        suffix = 'deb'
    elif system == 'Windows':
        suffix = 'exe'
    else:
        raise Exception("I don't know about any artifact on {}".format(system))

    return os.path.realpath(glob.glob(this_dir + '/../dist/LBRY*.' + suffix)[0])


def get_update_asset_path():
    # Get the asset used used for updates. On Mac, this is a .zip; on
    # Windows it's just the installer file.
    if platform.system() == 'Darwin':
        this_dir = os.path.dirname(os.path.realpath(__file__))
        return os.path.realpath(glob.glob(this_dir + '/../dist/LBRY*.zip')[0])
    else:
        return get_asset_path()


def get_latest_file_path():
    # The update metadata file is called latest.yml on Windows, latest-mac.yml on
    # Mac, latest-linux.yml on Linux
    this_dir = os.path.dirname(os.path.realpath(__file__))

    latestfilematches = glob.glob(this_dir + '/../dist/latest*.yml')

    return latestfilematches[0] if latestfilematches else None


def upload_to_github_if_tagged(repo_name):
    try:
        current_tag = subprocess.check_output(
            ['git', 'describe', '--exact-match', 'HEAD']).strip()
    except subprocess.CalledProcessError:
        print 'Not uploading to GitHub as we are not currently on a tag'
        return 1

    print "Current tag: " + current_tag

    if 'GH_TOKEN' not in os.environ:
        print 'Must set GH_TOKEN in order to publish assets to a release'
        return 1

    gh_token = os.environ['GH_TOKEN']
    auth = github.Github(gh_token)
    repo = auth.get_repo(repo_name)

    if not check_repo_has_tag(repo, current_tag):
        print 'Tag {} is not in repo {}'.format(current_tag, repo)
        # TODO: maybe this should be an error
        return 1

    asset_path = get_asset_path()
    print "Uploading " + asset_path + " to Github tag " + current_tag
    release = get_github_release(repo, current_tag)
    upload_asset_to_github(release, asset_path, gh_token)


def check_repo_has_tag(repo, target_tag):
    tags = repo.get_tags().get_page(0)
    for tag in tags:
        if tag.name == target_tag:
            return True
    return False


def get_github_release(repo, current_tag):
    for release in repo.get_releases():
        if release.tag_name == current_tag:
            return release
    raise Exception('No release for {} was found'.format(current_tag))


def upload_asset_to_github(release, asset_to_upload, token):
    basename = os.path.basename(asset_to_upload)
    for asset in release.raw_data['assets']:
        if asset['name'] == basename:
            print 'File {} has already been uploaded to {}'.format(basename, release.tag_name)
            return

    upload_uri = uritemplate.expand(release.upload_url, {'name': basename})
    count = 0
    while count < 10:
        try:
            output = _curl_uploader(upload_uri, asset_to_upload, token)
            if 'errors' in output:
                raise Exception(output)
            else:
                print 'Successfully uploaded to {}'.format(output['browser_download_url'])
        except Exception:
            print 'Failed uploading on attempt {}'.format(count + 1)
            count += 1


def _curl_uploader(upload_uri, asset_to_upload, token):
    # using requests.post fails miserably with SSL EPIPE errors. I spent
    # half a day trying to debug before deciding to switch to curl.
    #
    # TODO: actually set the content type
    print 'Using curl to upload {} to {}'.format(asset_to_upload, upload_uri)
    cmd = [
        'curl',
        '-sS',
        '-X', 'POST',
        '-u', ':{}'.format(os.environ['GH_TOKEN']),
        '--header', 'Content-Type: application/octet-stream',
        '--data-binary', '@-',
        upload_uri
    ]
    # '-d', '{"some_key": "some_value"}',
    print 'Calling curl:'
    print cmd
    print
    with open(asset_to_upload, 'rb') as fp:
        p = subprocess.Popen(cmd, stdin=fp, stderr=subprocess.PIPE, stdout=subprocess.PIPE)
        stdout, stderr = p.communicate()
    print 'curl return code:', p.returncode
    if stderr:
        print 'stderr output from curl:'
        print stderr
    print 'stdout from curl:'
    print stdout
    return json.loads(stdout)


if __name__ == '__main__':
    sys.exit(main())