From 2999ad4f6eed58b4d0651bad9f6300dadfe865d9 Mon Sep 17 00:00:00 2001
From: Igor Gassmann
Date: Sun, 25 Mar 2018 14:05:48 -0400
Subject: [PATCH] chore: clean-up unused files

---
 .appveyor.yml          |  27 -----
 .bumpversion.cfg       |  22 ----
 Dangerfile             |   6 -
 build.sh               |   4 -
 build/build.ps1        |  32 ------
 build/build.sh         |  59 ----------
 build/changelog.py     | 129 ---------------------
 build/install_deps.sh  | 122 --------------------
 build/lbry3.pfx.enc    | Bin 5408 -> 0 bytes
 build/release.py       | 250 -----------------------------------------
 build/requirements.txt |   6 -
 build/upload_assets.py | 146 ------------------------
 12 files changed, 803 deletions(-)
 delete mode 100644 .appveyor.yml
 delete mode 100644 .bumpversion.cfg
 delete mode 100644 Dangerfile
 delete mode 100755 build.sh
 delete mode 100644 build/build.ps1
 delete mode 100755 build/build.sh
 delete mode 100644 build/changelog.py
 delete mode 100755 build/install_deps.sh
 delete mode 100644 build/lbry3.pfx.enc
 delete mode 100644 build/release.py
 delete mode 100644 build/requirements.txt
 delete mode 100644 build/upload_assets.py

diff --git a/.appveyor.yml b/.appveyor.yml
deleted file mode 100644
index 237d2ee1b..000000000
--- a/.appveyor.yml
+++ /dev/null
@@ -1,27 +0,0 @@
-# Test against the latest version of this Node.js version
-environment:
-  nodejs_version: 7
-  GH_TOKEN:
-    secure: LiI5jyuHUw6XbH4kC3gP1HX4P/v4rwD/gCNtaFhQu2AvJz1/1wALkp5ECnIxRySN
-  pfx_key:
-    secure: 1mwqyRy7hDqDjDK+TIAoaXyXzpNgwruFNA6TPkinUcVM7A+NLD33RQLnfnwVy+R5ovD2pUfhQ6+N0Fqebv6tZh436LIEsock+6IOdpgFwrg=
-  AWS_ACCESS_KEY_ID:
-    secure: iVGwoJ7ogspjSmuqr+haVPLglSgQsp6tUZx6mIlKH7Q=
-  AWS_SECRET_ACCESS_KEY:
-    secure: zKaqdZGPl0exDL5YhJkb33prSemC9Rzg9S7Lw2wFy1WnJ6ffgl6mQH7jqJDUTqsY
-  CSC_LINK: build\lbry3.pfx
-  CSC_KEY_PASSWORD:
-    secure: u6DydPcdrUJlxGL9uc7yQRYG8+5rY6aAEE9nfCSzFyNzZlX9NniOp8Uh5ZKQqX7bGEngLI6ipbLfiJvn0XFnhbn2iTkOuMqOXVJVOehvwlQ=
-
-skip_branch_with_pr: false
-
-clone_folder: C:\projects\lbry-app
-
-build_script:
-  - ps: build\build.ps1
-
-test: off
-
-artifacts:
-  - path: dist\*.exe
-    name: LBRY
diff --git a/.bumpversion.cfg b/.bumpversion.cfg
deleted file mode 100644
index fb5518f9f..000000000
--- a/.bumpversion.cfg
+++ /dev/null
@@ -1,22 +0,0 @@
-[bumpversion]
-current_version = 0.20.0
-commit = True
-tag = True
-parse = (?P<major>\d+)\.(?P<minor>\d+)\.(?P<patch>\d+)(-(?P<release>[a-z]+)(?P<candidate>\d+))?
-serialize =
-  {major}.{minor}.{patch}-{release}{candidate}
-  {major}.{minor}.{patch}
-
-[bumpversion:part:candidate]
-first_value = 1
-
-[bumpversion:part:release]
-optional_value = production
-values =
-  rc
-  production
-
-[bumpversion:file:package.json]
-search = "version": "{current_version}"
-replace = "version": "{new_version}"
-
diff --git a/Dangerfile b/Dangerfile
deleted file mode 100644
index 6cfa33ba7..000000000
--- a/Dangerfile
+++ /dev/null
@@ -1,6 +0,0 @@
-# Add a CHANGELOG entry for app changes
-has_app_changes = !(git.modified_files.grep(/js/).empty? && git.modified_files.grep(/scss/).empty?)
-if !git.modified_files.include?("CHANGELOG.md") && has_app_changes - fail("Please include a CHANGELOG entry.") - message "See http://keepachangelog.com/en/0.3.0/ for details on good changelog guidelines" -end diff --git a/build.sh b/build.sh deleted file mode 100755 index ec3e6d703..000000000 --- a/build.sh +++ /dev/null @@ -1,4 +0,0 @@ -#!/bin/bash -# this is here because teamcity runs /build.sh to build the project -DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" && pwd )" -"$DIR/build/build.sh" diff --git a/build/build.ps1 b/build/build.ps1 deleted file mode 100644 index 730fd80af..000000000 --- a/build/build.ps1 +++ /dev/null @@ -1,32 +0,0 @@ -pip install -r build\requirements.txt - -# Decrypt cert -nuget install secure-file -ExcludeVersion -secure-file\tools\secure-file -decrypt build\lbry3.pfx.enc -secret "$env:pfx_key" - -# Get the latest stable version of Node.js or io.js -Install-Product node $env:nodejs_version -npm install -g yarn -yarn install - -# clean dist\ -if (Test-Path -Path dist\) { - Remove-Item -Recurse -Force dist\ -} -New-Item -ItemType directory -Path dist\ - -# get daemon and cli executable -$package_settings = (Get-Content package.json -Raw | ConvertFrom-Json).lbrySettings -$daemon_ver = $package_settings.lbrynetDaemonVersion -$daemon_url_template = $package_settings.lbrynetDaemonUrlTemplate -$daemon_url = $daemon_url_template.Replace('OSNAME', 'windows').Replace('DAEMONVER', $daemon_ver) -Invoke-WebRequest -Uri $daemon_url -OutFile daemon.zip -Expand-Archive daemon.zip -DestinationPath static\daemon\ -dir static\daemon\ # verify that daemon binary is there -rm daemon.zip - -# build electron app -yarn build -dir dist # verify that binary was built/named correctly - -python build\upload_assets.py diff --git a/build/build.sh b/build/build.sh deleted file mode 100755 index 7ce077b3d..000000000 --- a/build/build.sh +++ /dev/null @@ -1,59 +0,0 @@ -#!/bin/bash - -set -euo pipefail - -ROOT="$( cd "$( dirname "${BASH_SOURCE[0]}" )/.." && pwd )" -cd "$ROOT" -BUILD_DIR="$ROOT/build" - -LINUX=false -OSX=false -if [ "$(uname)" == "Darwin" ]; then - echo -e "\033[0;32mBuilding for OSX\x1b[m" - OSX=true -elif [ "$(expr substr $(uname -s) 1 5)" == "Linux" ]; then - echo -e "\033[0;32mBuilding for Linux\x1b[m" - LINUX=true -else - echo -e "\033[1;31mPlatform detection failed\x1b[m" - exit 1 -fi - -if $OSX; then - ICON="$BUILD_DIR/icon.icns" -else - ICON="$BUILD_DIR/icons/48x48.png" -fi - -FULL_BUILD="${FULL_BUILD:-false}" -if [ -n "${TEAMCITY_VERSION:-}" -o -n "${APPVEYOR:-}" ]; then - FULL_BUILD="true" -fi - -DEPS="${DEPS:-$FULL_BUILD}" -if [ "$DEPS" != "true" ]; then - echo -e "\033[1;36mDependencies will NOT be installed. Run with \"INSTALL_DEPENDENCIES=true\" to install dependencies, or \"FULL_BUILD=true\" to install dependencies and build a complete app.\x1b[m" -else - # install dependencies - echo -e "\033[0;32mInstalling Dependencies\x1b[m" - "$BUILD_DIR/install_deps.sh" -fi - -[ -d "$ROOT/dist" ] && rm -rf "$ROOT/dist" - -yarn install - -################### -# Build the app # -################### -if [ "$FULL_BUILD" == "true" ]; then - if $OSX; then - security unlock-keychain -p ${KEYCHAIN_PASSWORD} osx-build.keychain - fi - - yarn build - - echo -e '\033[0;32mBuild and packaging complete.\x1b[m' -else - echo -e 'Build complete. 
Run \033[1;31myarn dev\x1b[m to launch the app' -fi diff --git a/build/changelog.py b/build/changelog.py deleted file mode 100644 index 322fc30a7..000000000 --- a/build/changelog.py +++ /dev/null @@ -1,129 +0,0 @@ -import datetime -import re - -CHANGELOG_START_RE = re.compile(r'^\#\# \[Unreleased\]') -CHANGELOG_END_RE = re.compile(r'^\#\# \[.*\] - \d{4}-\d{2}-\d{2}') -# if we come across a section header between two release section headers -# then we probably have an improperly formatted changelog -CHANGELOG_ERROR_RE = re.compile(r'^\#\# ') -SECTION_RE = re.compile(r'^\#\#\# (.*)$') -EMPTY_RE = re.compile(r'^\w*\*\w*$') -ENTRY_RE = re.compile(r'\* (.*)') -VALID_SECTIONS = ['Added', 'Changed', 'Deprecated', 'Removed', 'Fixed', 'Security'] - -# allocate some entries to cut-down on merge conflicts -TEMPLATE = """### Added - * - * - -### Changed - * - * - -### Fixed - * - * - -### Deprecated - * - * - -### Removed - * - * - -""" - - -class Changelog(object): - def __init__(self, path): - self.path = path - self.start = [] - self.unreleased = [] - self.rest = [] - self._parse() - - def _parse(self): - with open(self.path) as fp: - lines = fp.readlines() - - unreleased_start_found = False - unreleased_end_found = False - - for line in lines: - if not unreleased_start_found: - self.start.append(line) - if CHANGELOG_START_RE.search(line): - unreleased_start_found = True - continue - if unreleased_end_found: - self.rest.append(line) - continue - if CHANGELOG_END_RE.search(line): - self.rest.append(line) - unreleased_end_found = True - continue - if CHANGELOG_ERROR_RE.search(line): - raise Exception( - 'Failed to parse {}: {}'.format(self.path, 'unexpected section header found')) - self.unreleased.append(line) - - self.unreleased = self._normalize_section(self.unreleased) - - @staticmethod - def _normalize_section(lines): - """Parse a changelog entry and output a normalized form""" - sections = {} - current_section_name = None - current_section_contents = [] - for line in lines: - line = line.strip() - if not line or EMPTY_RE.match(line): - continue - match = SECTION_RE.match(line) - if match: - if current_section_contents: - sections[current_section_name] = current_section_contents - current_section_contents = [] - current_section_name = match.group(1) - if current_section_name not in VALID_SECTIONS: - raise ValueError("Section '{}' is not valid".format(current_section_name)) - continue - match = ENTRY_RE.match(line) - if match: - current_section_contents.append(match.group(1)) - continue - raise Exception('Something is wrong with line: {}'.format(line)) - if current_section_contents: - sections[current_section_name] = current_section_contents - - output = [] - for section in VALID_SECTIONS: - if section not in sections: - continue - output.append('### {}'.format(section)) - for entry in sections[section]: - output.append(' * {}'.format(entry)) - output.append("\n") - return output - - def get_unreleased(self): - return '\n'.join(self.unreleased) if self.unreleased else None - - def bump(self, version): - if not self.unreleased: - return - - today = datetime.datetime.today() - header = "## [{}] - {}\n\n".format(version, today.strftime('%Y-%m-%d')) - - changelog_data = ( - ''.join(self.start) + - TEMPLATE + - header + - '\n'.join(self.unreleased) + '\n\n' - + ''.join(self.rest) - ) - - with open(self.path, 'w') as fp: - fp.write(changelog_data) diff --git a/build/install_deps.sh b/build/install_deps.sh deleted file mode 100755 index c31b010f0..000000000 --- a/build/install_deps.sh +++ /dev/null @@ 
-1,122 +0,0 @@ -#!/bin/bash - -set -euo pipefail - -LINUX=false -OSX=false - -if [ "$(uname)" == "Darwin" ]; then - OSX=true -elif [ "$(expr substr $(uname -s) 1 5)" == "Linux" ]; then - LINUX=true -else - echo "Platform detection failed" - exit 1 -fi - - -SUDO='' -if (( $EUID != 0 )); then - SUDO='sudo' -fi - -cmd_exists() { - command -v "$1" >/dev/null 2>&1 - return $? -} - -set +eu -GITUSERNAME=$(git config --global --get user.name) -if [ -z "$GITUSERNAME" ]; then - git config --global user.name "$(whoami)" -fi -GITEMAIL=$(git config --global --get user.email) -if [ -z "$GITEMAIL" ]; then - git config --global user.email "$(whoami)@lbry.io" -fi -set -eu - - -if $LINUX; then - INSTALL="$SUDO apt-get install --no-install-recommends -y" - $INSTALL build-essential libssl-dev libffi-dev libgmp3-dev python2.7-dev libsecret-1-dev curl -elif $OSX; then - if ! cmd_exists brew; then - /usr/bin/ruby -e "$(curl -fsSL https://raw.githubusercontent.com/Homebrew/install/master/install)" - else - brew update - fi -fi - - -if ! cmd_exists python; then - if $LINUX; then - $INSTALL python2.7 - elif $OSX; then - brew install python - else - echo "python2.7 required" - exit 1 - fi -fi - -PYTHON_VERSION=$(python -c 'import sys; print(".".join(map(str, sys.version_info[:2])))') -if [ "$PYTHON_VERSION" != "2.7" ]; then - echo "Python 2.7 required" - exit 1 -fi - -if ! cmd_exists pip; then - if $LINUX; then - $INSTALL python-pip - elif $OSX; then - $SUDO easy_install pip - else - echo "pip required" - exit 1 - fi - $SUDO pip install --upgrade pip -fi - -if $LINUX && [ "$(pip list --format=columns | grep setuptools | wc -l)" -ge 1 ]; then - $SUDO pip install setuptools -fi - -if ! cmd_exists virtualenv; then - $SUDO pip install virtualenv -fi - -if ! cmd_exists node; then - if $LINUX; then - curl -sL https://deb.nodesource.com/setup_8.x | $SUDO -E bash - - $INSTALL nodejs - elif $OSX; then - brew install node - else - echo "node required" - exit 1 - fi -fi - -if ! cmd_exists yarn; then - if $LINUX; then - curl -sS https://dl.yarnpkg.com/debian/pubkey.gpg | $SUDO apt-key add - - echo "deb https://dl.yarnpkg.com/debian/ stable main" | $SUDO tee /etc/apt/sources.list.d/yarn.list - $SUDO apt-get update - $SUDO apt-get -o Dpkg::Options::="--force-overwrite" install yarn - elif $OSX; then - brew install yarn - else - echo "yarn required" - exit 1 - fi -fi - -if ! 
cmd_exists unzip; then - if $LINUX; then - $INSTALL unzip - else - echo "unzip required" - exit 1 - fi -fi diff --git a/build/lbry3.pfx.enc b/build/lbry3.pfx.enc deleted file mode 100644 index 330cfc05b4d34fec9470f5a774f1f0255a9292e8..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 5408 zcmV+*72oQbBgHYKDCSWl!COxwy9ttCgq%0k?}y@h+3m9 zzjHXZOPw6r`@}XhFWL=Z5EP|TmT1M;OR$9lvk(ZP00ginK9mTRTWh3DA_l)8&w$G3 zCjVmU&#**CboM&oN(heOMead<@!3#zcY|T>5U&2P>)vBXhNxdK=>=Z%J$?6BzZRxz3w>3YkB+VH&15_NB))6;e+X6<6#AjfEE z->j&!kF`)H4dZqqw@b67J8~9VJFT3xBe!zId`fPSw^3C_(__o$%CCTA2-GaA-*Ipj zF7W=jmiIdD6t}VaxbXX8n=v}~4tGG+d_6WtJAFyK4gQPkiIW8TbDs9(+rDn79YkeU zBn0(mxD|J%mgrE)W6S(ni+gPUoHYd&WBWx3`*+(don=m!hLocwdc|>5kAZx9Pp9t* z!JeGo!^r-uJR#P0L@FJD8AP^i|AXRJy6!xGV^TBcyk%tcELgZXtPALE65E|%R{hN= zMtxlIvEM%7Bt<{JK%M!Cczqx)yKB(#geGD)fe^#^qG8INmbt}adq%Okf3 zm;}qLX>OZ%q`wdsiQHZaWQjZ?r$Z3)8-R@#%oq9PQpdjKc1=*zY<(aK*rK?G0u0Tg zMiGXm#9qx#11}57WBl~b&udXBS9Lif|0xWm6BDLSF*UDFFh~R|TS!bL;U%TWT_fnQ zEnb#rs_ICB&eU-E%4QY<;1lAXt6Bq5dhjyBw5ceux0pT{ek_Km&Q?D19Aa%xT^j=3 zk}nDwQ1L^olC8olbVS ze$HBaltdUTL?#+@Jkr0-pssv$b!=KX@lj01mWT~H=BwdU-{~udZu42O=XJ3Oa-Z+O z1SQ|x;QcR)(ViffT%^rk&r>v~1+RjZ#MalLEnK_W6fgRKL?Md7EK#2^J}y@cFS9#s z&()KtTM7*Puq|UaTxro0#%PEA!%&A%us$SFPd=yO2l5&nH4u<5UwvRFmr>&DOP3EGNv2Sb zWNX=1zgp-f5IJdn=6~zv`YE4h!iD6W^B&{^dD6uYVy5dA!9ytY4t&)kCbu_Qq~gjv z?)Q94KmoDq(^(4);G902Fs@zO=+O!w*-sj*kG9Ufq14B4?Ze7`eIYP?Q)h$nBV(qZ z39fcH&h9=tQETUYR1YWng zxQ+i#PbeLzeBMqZ0X`vVCZH6gO|~8rO$e3uX??AjQy4K)u2Y$Th{+T-<>Q>T+lUS| z)++M)H=|EPWzpJ%EO25|I?L30w0y`SudOI<{tkQBCP!mJomI%^kP`rPgpfB zS{J?kMNnc@NBOvh3C;#qjAj7WuoRyb6Uln?eh{|`7wE&Rr5*xdj=00-;#u5$Fw@v{ z*KSaV?fb~skK&g9+ea1=ycT&(^E5mR8^k#p9^%zfeEZ=c&Z2_xM&|Bph;>*-3|8+- zahlYzY}=}iupW<$>B^`^?jCGm403IGj#rRQ_b9@`n#Hf5fOf$jl_+y0JZO5t8aDZU zxy$g56$-)L^S<4|i1ziVN|VpA&3?fP;?VrPtE%ekXLc|Y%MS!F{FS)GQXw?4cpP-# z0`}5!3*9k`{#`O>N>F@8@t`s*tnC`W+R0- zVn%wti{oUF6$hx59xcH02%AiI#Fd=~CM6$bLW5OoGhOG*1)xIkcyvNrr5~ggltgy0 zSznZkbAu|?<-IU-^Vc`T#Y#ah?P{;&2jTE7T55Uz1@QcA!kO zUMoRnNIlI^g;EcuU)IEU=_q+csMkDa)MbI0=y(5bjLbM?HlBtDi;)}@?yl;+bcE|6 z@(1}On~UWL1~=00#0QDH_f`D8KoYRDCY%kB+k>UWK1InhaeqeQXq8z9NEu}*W++Hg z$Lk_qHB)OFPz+?nZg_6>#XWm0be$rUcHCeh7VlZ^6lQdUOAp;J!pgH0xrnEv_t~m* zwDJFHhw70YNEg>;MwY=QuoYp~d?c@vKeGVWzJu2`@6&NXKv!FUAgd>rU)?9x9i)Eo z4q&_Xqy*v@q2+1T;hQ!S{f(-{U8M7!aT6ntQu!{9Pw@p`TT{ZE^R#USxpBG)@^UVH zfhf}h1=uvJfx+b)ky*qkgqKI&212X1G?mCRp<$d_$a#}gSPf%wZe?@fd@yHhQ`qI| z_Ty~8PLzq8-bXOi|wA3n7@>qX@Wg=M|R!TJ-IaVVs92vl7G zC*qy41%U>x@w|WD;@d9SdP`~Ur^JV%-B%he{C6Uj
H9wcEVIQLC;{_Lrhw!1?O zY?>T~nTv+A9U<%`XaFw&gmVWWvT%vFeOOwig0fLU#qSsw+i^l?(nY9KrI7P_B`N{# z!WeN9&!e(qwhRpb;8Z|I>^FbQa|k(0#gr%frow?Nuhr$~Q*i8KZWhZy{_HyfEoYgr z{vfCs0P>fV^SzEenLy6aPU0k_e6t8OY#rux?*k!|U-!$6EBvS>5T=H67ZKlsWaeSR z8rw%&kJAonK_zn70U^`O}~sQQy0WwBfEzUjJsu*a%sBy4eq#e zkAx3im>&McF<*Ke;>GVcz z*9#V+1&DGg5WYrV=28QOR)0h+v5&cEjRneKHvp%3&i$ga4F`9c8Z!g+*|i z2&H1Ij7f4F96yh!#fEJ5R1s8~r3n>}%9%VqCTnv>yw<)T7Geh8xG}VtDN_@1ftaHA zeNc8629EWwHA9aM*ij+;x?WWTR0DXhtfJDnTM8|obzevqf1o!ZqfK_%2&0U_N-d*V zJ3DPpTEBSjPelq&3!*P%JJEVdwwG9CoWYHqZ1lzSdM_REF3j;M&ZMSR{4q+TkYm1h-LO9Kr6#mhjI67bln^R@nu3=Iehe?6(`lDzK^7l)w~(%A z6Kh|y?B{KjiLz7F2|#f0iV}J=?smIbUuku|Sy91#d$+Yvx}F`aQ;2}`6nW0ujSl$Z zop(t6BE=!0>01UV(ThP!@tXB=3lMZX5T=MKZp;FGkoP|>YQZF172goE`obJ%ljpB= zLQBh2D_*fe`r#+vIg@RhNI(0qZP82A^pIkHv5u1=jqu_{YzSg{L#rq)a`ndtlp5dy z6Pya_x>9+uEV|!He$X_$))o}5aH;6{iW%u@i5&%&>dk|*LjvsS+q0IoPHVyt%E_PB zUjG#&LWFdRL})ZJs~2}wS&Rw|-A9B)DWwT;jY`Jck9_d*HPrFYTGBYy+Pm*x(rwA; z<1HSBsdjybn;H|(BD41KGDzY+6y7ln5IMUxir>uW|4R(a&0NoJEvWv}oW+64-y^qz z%X58H5ewhh=$D#%kfVb4AwK|8vfjnW4N5UaMa*u#u1eB17^I%>Q+j9ckcAzaWhLEpt?0ziA;9#F_~3gS;A89iwhG3D}H?| z*YxjN8vXl-TZM4u-n{ml19Gd|vMbI?WdP(()jt4VHR-#AUvNSG<>F5>4oe|1R+faa zRI*(!5%+`C=wZqlB|}&eu}3vE|ke+y$8@uH{h+ zNCAvQa7@xQ=f!v`~VHV3a(hrG`K6jzL7-CB3!)-GT7JodE&2rG<_|Y^I2kX zZ9&p;mP*Z24h|DCw@p!+NI+Jv#jR2HjV|@+*TU#Jz}->b+JKwWp%9EG|0n8j;XP#e zTpwo2M3%KjE_l0RK}{^XfliUl$78Mw)O9T2*t$i#r}i^mncO;UKn zfW2}&M}0PNqHieQ{p`aauy43o)cz?MGU@|xmaT8w#j>y+Epi#A!BQb8YPad!K;afB zYHLVAd3mRZbM|t_Bbv-_60ZkPXJ1;c)O20q4`HF9A1C#xv-hws*Q1gnmbrz}N^aqD z2I7_u;&R{j{ApHcdGN@RZjJS3F zd9Io9ZIIrhce%RH4fY|@k2{aKFJ?#NWtA8XOtJ@iYV}a-41bz7GRLm5V8WIk67e^Dy(%R3^+XzqL!e}(n7u+!oMvd{j=>QUaj9`s5~(l$ zt2x#@G3^C>hSN!rGb1h>!>R;2=ZzKgFCkU=F1=Tt&f(t`acqm2U=5y90xVSIK2Ak1 ziE{ZEzLe)9ZrHLqiZZdY!!uwX$pne(h*NP1O}k}6lzcG4)qjJF(rreDJ^Uz}E+VsU z2T<#Nb?flUa_}YqA!UakI}q9MJkHR&T*o--(_CXaFi=2bJ2%wCF>YHS%tM??eBPs| z6m3wTO_NJr8SBT{g{47^3#qo=BCpm2WAXsZOA=-(Obx(zFahW+zP~-7wf_{ReH)wH z>{-J3z9o5vtqn>@tn4%-H9hlsxDVUMKY*|OH0+C)z$>1L7!4Um>NvPf8c-HjOvci5 ze3$BOX6qC0O*(Hfe)L8l3)ggrl7)o0zyjVk0c`4KDUq0Ji2E7tx++#vE+4EcJ32Zb KtknZI2*A(0C1wQx diff --git a/build/release.py b/build/release.py deleted file mode 100644 index f17600e79..000000000 --- a/build/release.py +++ /dev/null @@ -1,250 +0,0 @@ -"""Bump version and create Github release - -This script should be run locally, not on a build server. -""" -import argparse -import contextlib -import os -import json -import re -import requests -import subprocess -import sys - -import git -import github - -import changelog - -ROOT = os.path.dirname(os.path.dirname(os.path.realpath(__file__))) -APP_PACKAGE_JSON_FILE = os.path.join(ROOT, 'package.json') - - -def main(): - bumpversion_parts = get_bumpversion_parts() - - parser = argparse.ArgumentParser() - parser.add_argument("part", choices=bumpversion_parts, help="part of version to bump") - parser.add_argument("--skip-sanity-checks", action="store_true") - parser.add_argument("--skip-push", action="store_true") - parser.add_argument("--dry-run", action="store_true") - parser.add_argument("--confirm", action="store_true") - args = parser.parse_args() - - if args.dry_run: - print "DRY RUN. Nothing will be committed/pushed." 
- - repo = Repo('lbry-app', args.part, ROOT) - branch = 'master' - - print 'Current version: {}'.format(repo.current_version) - print 'New version: {}'.format(repo.new_version) - with open(APP_PACKAGE_JSON_FILE, 'r') as f: - package_settings = json.load(f)['lbrySettings'] - daemon_url_template = package_settings['lbrynetDaemonUrlTemplate'] - daemon_version = package_settings['lbrynetDaemonVersion'] - print 'Daemon version: {} ({})'.format(daemon_version, daemon_url_template.replace('DAEMONVER', daemon_version)) - - if not args.confirm and not confirm(): - print "Aborting" - return 1 - - if not args.skip_sanity_checks: - run_sanity_checks(repo, branch) - repo.assert_new_tag_is_absent() - - is_rc = re.search('-rc\d+$', repo.new_version) is not None - # only have a release message for real releases, not for RCs - release_msg = '' if is_rc else repo.get_unreleased_changelog() - - if args.dry_run: - print "rc: " + ("yes" if is_rc else "no") - print "release message: \n" + (release_msg or " NO MESSAGE FOR RCs") - return - - gh_token = get_gh_token() - auth = github.Github(gh_token) - github_repo = auth.get_repo('lbryio/lbry-app') - - if not is_rc: - repo.bump_changelog() - repo.bumpversion() - - new_tag = repo.get_new_tag() - github_repo.create_git_release(new_tag, new_tag, release_msg, draft=True, prerelease=is_rc) - - if args.skip_push: - print ( - 'Skipping push; you will have to reset and delete tags if ' - 'you want to run this script again.' - ) - else: - repo.git_repo.git.push(follow_tags=True, recurse_submodules='check') - - -class Repo(object): - def __init__(self, name, part, directory): - self.name = name - self.part = part - if not self.part: - raise Exception('Part required') - self.directory = directory - self.git_repo = git.Repo(self.directory) - self._bumped = False - - self.current_version = self._get_current_version() - self.new_version = self._get_new_version() - self._changelog = changelog.Changelog(os.path.join(self.directory, 'CHANGELOG.md')) - - def get_new_tag(self): - return 'v' + self.new_version - - def get_unreleased_changelog(self): - return self._changelog.get_unreleased() - - def bump_changelog(self): - self._changelog.bump(self.new_version) - with pushd(self.directory): - self.git_repo.git.add(os.path.basename(self._changelog.path)) - - def _get_current_version(self): - with pushd(self.directory): - output = subprocess.check_output( - ['bumpversion', '--dry-run', '--list', '--allow-dirty', self.part]) - return re.search('^current_version=(.*)$', output, re.M).group(1) - - def _get_new_version(self): - with pushd(self.directory): - output = subprocess.check_output( - ['bumpversion', '--dry-run', '--list', '--allow-dirty', self.part]) - return re.search('^new_version=(.*)$', output, re.M).group(1) - - def bumpversion(self): - if self._bumped: - raise Exception('Cowardly refusing to bump a repo twice') - with pushd(self.directory): - subprocess.check_call(['bumpversion', '--allow-dirty', self.part]) - self._bumped = True - - def assert_new_tag_is_absent(self): - new_tag = self.get_new_tag() - tags = self.git_repo.git.tag() - if new_tag in tags.split('\n'): - raise Exception('Tag {} is already present in repo {}.'.format(new_tag, self.name)) - - def is_behind(self, branch): - self.git_repo.remotes.origin.fetch() - rev_list = '{branch}...origin/{branch}'.format(branch=branch) - commits_behind = self.git_repo.git.rev_list(rev_list, right_only=True, count=True) - commits_behind = int(commits_behind) - return commits_behind > 0 - - -def get_bumpversion_parts(): - with 
pushd(ROOT): - output = subprocess.check_output([ - 'bumpversion', '--dry-run', '--list', '--allow-dirty', 'fake-part', - ]) - parse_line = re.search('^parse=(.*)$', output, re.M).group(1) - return tuple(re.findall('<([^>]+)>', parse_line)) - - -def get_gh_token(): - if 'GH_TOKEN' in os.environ: - return os.environ['GH_TOKEN'] - else: - print """ -Please enter your personal access token. If you don't have one -See https://github.com/lbryio/lbry-app/wiki/Release-Script#generate-a-personal-access-token -for instructions on how to generate one. - -You can also set the GH_TOKEN environment variable to avoid seeing this message -in the future""" - return raw_input('token: ').strip() - - -def confirm(): - try: - return raw_input('Is this what you want? [y/N] ').strip().lower() == 'y' - except KeyboardInterrupt: - return False - - -def run_sanity_checks(repo, branch): - if repo.git_repo.is_dirty(): - print 'Cowardly refusing to release a dirty repo' - sys.exit(1) - if repo.git_repo.active_branch.name != branch: - print 'Cowardly refusing to release when not on the {} branch'.format(branch) - sys.exit(1) - if repo.is_behind(branch): - print 'Cowardly refusing to release when behind origin' - sys.exit(1) - if not is_custom_bumpversion_version(): - print ( - 'Install LBRY\'s fork of bumpversion: ' - 'pip install -U git+https://github.com/lbryio/bumpversion.git' - ) - sys.exit(1) - if not check_daemon_urls(): - sys.exit(1) - - -def check_daemon_urls(): - with open(APP_PACKAGE_JSON_FILE, 'r') as f: - package_settings = json.load(f)['lbrySettings'] - - daemon_url_template = package_settings['lbrynetDaemonUrlTemplate'] - daemon_version = package_settings['lbrynetDaemonVersion'] - - if "OSNAME" not in daemon_url_template: - print "Daemon URL must include the string \"OSNAME\"" - return False - elif "DAEMONVER" not in daemon_url_template: - print "Daemon URL must include the string \"DAEMONVER\"" - return False - - for osname in ('linux', 'macos', 'windows'): - if not check_url(daemon_url_template.replace('DAEMONVER', daemon_version).replace('OSNAME', osname)): - print "Daemon URL for", osname, " does not work" - return False - - return True - -def check_url(url): - url = url.strip() - r = requests.head(url) - if r.status_code >= 400: - return False - elif r.status_code >= 300: - new_location = r.headers.get('Location').strip() - if new_location == url: - # self-loop - return False - if "amazonaws.com" in new_location: - # HEAD doesnt work on s3 links, so assume its good - return True - return check_url(new_location) - return True - - -def is_custom_bumpversion_version(): - try: - output = subprocess.check_output(['bumpversion', '-v'], stderr=subprocess.STDOUT).strip() - if output == 'bumpversion 0.5.4-lbry': - return True - except (subprocess.CalledProcessError, OSError): - pass - return False - - -@contextlib.contextmanager -def pushd(new_dir): - previous_dir = os.getcwd() - os.chdir(new_dir) - yield - os.chdir(previous_dir) - - -if __name__ == '__main__': - sys.exit(main()) diff --git a/build/requirements.txt b/build/requirements.txt deleted file mode 100644 index 75805d517..000000000 --- a/build/requirements.txt +++ /dev/null @@ -1,6 +0,0 @@ -GitPython==2.1.1 -PyGithub==1.32 -requests[security]==2.13.0 -uritemplate==3.0.0 -git+https://github.com/lbryio/bumpversion.git -boto3==1.4.4 diff --git a/build/upload_assets.py b/build/upload_assets.py deleted file mode 100644 index b0fb5513c..000000000 --- a/build/upload_assets.py +++ /dev/null @@ -1,146 +0,0 @@ -import glob -import json -import os -import 
platform -import subprocess -import sys - -import github -import uritemplate -import boto3 - -def main(): - upload_to_github_if_tagged('lbryio/lbry-app') - - -def get_asset_path(): - this_dir = os.path.dirname(os.path.realpath(__file__)) - system = platform.system() - if system == 'Darwin': - suffix = 'dmg' - elif system == 'Linux': - suffix = 'deb' - elif system == 'Windows': - suffix = 'exe' - else: - raise Exception("I don't know about any artifact on {}".format(system)) - - return os.path.realpath(glob.glob(this_dir + '/../dist/LBRY*.' + suffix)[0]) - -def get_update_asset_path(): - # Get the asset used used for updates. On Mac, this is a .zip; on - # Windows it's just the installer file. - if platform.system() == 'Darwin': - this_dir = os.path.dirname(os.path.realpath(__file__)) - return os.path.realpath(glob.glob(this_dir + '/../dist/LBRY*.zip')[0]) - else: - return get_asset_path() - - -def get_latest_file_path(): - # The update metadata file is called latest.yml on Windows, latest-mac.yml on - # Mac, latest-linux.yml on Linux - this_dir = os.path.dirname(os.path.realpath(__file__)) - - latestfilematches = glob.glob(this_dir + '/../dist/latest*.yml') - - return latestfilematches[0] if latestfilematches else None - -def upload_to_github_if_tagged(repo_name): - try: - current_tag = subprocess.check_output( - ['git', 'describe', '--exact-match', 'HEAD']).strip() - except subprocess.CalledProcessError: - print 'Not uploading to GitHub as we are not currently on a tag' - return 1 - - print "Current tag: " + current_tag - - if 'GH_TOKEN' not in os.environ: - print 'Must set GH_TOKEN in order to publish assets to a release' - return 1 - - gh_token = os.environ['GH_TOKEN'] - auth = github.Github(gh_token) - repo = auth.get_repo(repo_name) - - if not check_repo_has_tag(repo, current_tag): - print 'Tag {} is not in repo {}'.format(current_tag, repo) - # TODO: maybe this should be an error - return 1 - - asset_path = get_asset_path() - print "Uploading " + asset_path + " to Github tag " + current_tag - release = get_github_release(repo, current_tag) - upload_asset_to_github(release, asset_path, gh_token) - - -def check_repo_has_tag(repo, target_tag): - tags = repo.get_tags().get_page(0) - for tag in tags: - if tag.name == target_tag: - return True - return False - - -def get_github_release(repo, current_tag): - for release in repo.get_releases(): - if release.tag_name == current_tag: - return release - raise Exception('No release for {} was found'.format(current_tag)) - - -def upload_asset_to_github(release, asset_to_upload, token): - basename = os.path.basename(asset_to_upload) - for asset in release.raw_data['assets']: - if asset['name'] == basename: - print 'File {} has already been uploaded to {}'.format(basename, release.tag_name) - return - - upload_uri = uritemplate.expand(release.upload_url, {'name': basename}) - count = 0 - while count < 10: - try: - output = _curl_uploader(upload_uri, asset_to_upload, token) - if 'errors' in output: - raise Exception(output) - else: - print 'Successfully uploaded to {}'.format(output['browser_download_url']) - except Exception: - print 'Failed uploading on attempt {}'.format(count + 1) - count += 1 - - -def _curl_uploader(upload_uri, asset_to_upload, token): - # using requests.post fails miserably with SSL EPIPE errors. I spent - # half a day trying to debug before deciding to switch to curl. 
- # - # TODO: actually set the content type - print 'Using curl to upload {} to {}'.format(asset_to_upload, upload_uri) - cmd = [ - 'curl', - '-sS', - '-X', 'POST', - '-u', ':{}'.format(os.environ['GH_TOKEN']), - '--header', 'Content-Type: application/octet-stream', - '--data-binary', '@-', - upload_uri - ] - # '-d', '{"some_key": "some_value"}', - print 'Calling curl:' - print cmd - print - with open(asset_to_upload, 'rb') as fp: - p = subprocess.Popen(cmd, stdin=fp, stderr=subprocess.PIPE, stdout=subprocess.PIPE) - stdout, stderr = p.communicate() - print 'curl return code:', p.returncode - if stderr: - print 'stderr output from curl:' - print stderr - print 'stdout from curl:' - print stdout - return json.loads(stdout) - - -if __name__ == '__main__': - sys.exit(main())