#!/usr/bin/env bash
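# Exit immediately if any command in this script fails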
set -e
#Always compile ytsync
make
#Always compile supporty
cd e2e/supporty && make && cd ../..
# OVERRIDE this in your .env file if running on macOS. Check docker-compose.yml for details
export LOCAL_TMP_DIR="/var/tmp:/var/tmp"
#Private Variables Set in local installations: SLACK_TOKEN,YOUTUBE_API_KEY,AWS_S3_ID,AWS_S3_SECRET,AWS_S3_REGION,AWS_S3_BUCKET
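# Create .env if it doesn't exist, then source it with allexport so everything it defines is exported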
touch -a .env && set -o allexport; source ./.env; set +o allexport
echo " LOCAL_TMP_DIR= $LOCAL_TMP_DIR "
# Compose settings - docker only
export SLACK_CHANNEL="ytsync-travis"
export LBRY_API_TOKEN="ytsyntoken"
export LBRY_WEB_API="http://localhost:15400"
export LBRYNET_ADDRESS="http://localhost:15100"
export LBRYCRD_STRING="tcp://lbry:lbry@localhost:15200"
export LBRYNET_USE_DOCKER=true
export REFLECT_BLOBS=false
export CLEAN_ON_STARTUP=true
export REGTEST=true
# Local settings
export BLOBS_DIRECTORY="$(pwd)/e2e/blobsfiles"
export LBRYNET_DIR="$(pwd)/e2e/persist/.lbrynet/.local/share/lbry/lbrynet/"
export LBRYNET_WALLETS_DIR="$(pwd)/e2e/persist/.lbrynet/.local/share/lbry/lbryum"
export TMP_DIR="/var/tmp"
export UID
cd ./e2e
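# Stop and remove any containers left over from a previous run, then pull fresh images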
docker-compose stop
docker-compose rm -f
echo " $DOCKER_PASSWORD " | docker login --username " $DOCKER_USERNAME " --password-stdin
docker-compose pull
if [[ -d persist ]]; then rm -rf persist; fi
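# Recreate the persisted state directories, world-writable so the containers can write to them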
mkdir -m 0777 -p ./persist
mkdir -m 777 -p ./persist/.walletserver
mkdir -m 777 -p ./persist/.lbrynet
#sudo chown -Rv 999:999 ./persist/.walletserver
#sudo chown -Rv 1000:1000 ./persist/.lbrynet
docker-compose up -d
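# Poll the internal-apis endpoint (LBRY_WEB_API above) until it starts answering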
printf 'waiting for internal apis'
until curl --output /dev/null --silent --head --fail http://localhost:15400; do
printf '.'
sleep 1
done
echo "successfully started..."
#Data Setup for test
./data_setup.sh
# Execute the sync test!
./../bin/ytsync --channelID UCNQfQvFMPnInwsU_iGYArJQ --videos-limit 2 --concurrent-jobs 4 # Force the intended channel, just in case. This channel lines up with the api container
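# Read the sync results back from the lbry and chainquery MySQL databases for the checks below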
status=$(mysql -u lbry -plbry -ss -D lbry -h "127.0.0.1" -P 15500 -e 'SELECT status FROM youtube_data WHERE id=1')
videoStatus=$(mysql -u lbry -plbry -ss -D lbry -h "127.0.0.1" -P 15500 -e 'SELECT status FROM synced_video WHERE id=1')
videoClaimID1=$(mysql -u lbry -plbry -ss -D lbry -h "127.0.0.1" -P 15500 -e 'SELECT claim_id FROM synced_video WHERE id=1')
videoClaimID2=$(mysql -u lbry -plbry -ss -D lbry -h "127.0.0.1" -P 15500 -e 'SELECT claim_id FROM synced_video WHERE id=2')
videoClaimAddress1=$(mysql -u lbry -plbry -ss -D chainquery -h "127.0.0.1" -P 15500 -e 'SELECT claim_address FROM claim WHERE id=2')
videoClaimAddress2=$(mysql -u lbry -plbry -ss -D chainquery -h "127.0.0.1" -P 15500 -e 'SELECT claim_address FROM claim WHERE id=3')
# Create Supports for published claim
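# Each supporty call sends a support to the given claim ID and address on regtest; the final argument is presumably the support amount in LBC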
./supporty/supporty @BeamerTest "${videoClaimID1}" "${videoClaimAddress1}" lbrycrd_regtest 1.0
./supporty/supporty @BeamerTest "${videoClaimID2}" "${videoClaimAddress2}" lbrycrd_regtest 2.0
./supporty/supporty @BeamerTest "${videoClaimID1}" "${videoClaimAddress1}" lbrycrd_regtest 3.0
./supporty/supporty @BeamerTest "${videoClaimID2}" "${videoClaimAddress2}" lbrycrd_regtest 3.0
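# Mine a block on the regtest lbrycrd node (the "generate" RPC) so the support transactions above get confirmed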
curl --data-binary '{"jsonrpc":"1.0","id":"curltext","method":"generate","params":[1]}' -H 'content-type:text/plain;' --user lbry:lbry http://localhost:15200
# Reset status for the transfer test
mysql -u lbry -plbry -ss -D lbry -h "127.0.0.1" -P 15500 -e "UPDATE youtube_data SET status = 'queued' WHERE id = 1"
# Trigger transfer api
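# The auth_token, address and public_key below are presumably fixtures baked into the test containers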
curl -i -H 'Accept: application/json' -H 'Content-Type: application/json' 'http://localhost:15400/yt/transfer?auth_token=youtubertoken&address=n4eYeXAYmHo4YRUDEfsEhucy8y5LKRMcHg&public_key=tpubDA9GDAntyJu4hD3wU7175p7CuV6DWbYXfyb2HedBA3yuBp9HZ4n3QE4Ex6RHCSiEuVp2nKAL1Lzf2ZLo9ApaFgNaJjG6Xo1wB3iEeVbrDZp'
# Execute the transfer test!
./../bin/ytsync --channelID UCNQfQvFMPnInwsU_iGYArJQ --videos-limit 2 --concurrent-jobs 4 # Force the intended channel, just in case. This channel lines up with the api container
# Check that the channel and the video are marked as transferred and that all supports are spent
channelTransferStatus=$(mysql -u lbry -plbry -ss -D lbry -h "127.0.0.1" -P 15500 -e 'SELECT distinct transfer_state FROM youtube_data')
videoTransferStatus=$(mysql -u lbry -plbry -ss -D lbry -h "127.0.0.1" -P 15500 -e 'SELECT distinct transferred FROM synced_video')
nrUnspentSupports=$(mysql -u lbry -plbry -ss -D chainquery -h "127.0.0.1" -P 15500 -e 'SELECT COUNT(*) FROM chainquery.support INNER JOIN output ON output.transaction_hash = support.transaction_hash_id AND output.vout = support.vout WHERE output.is_spent = 0')
if [[ $status != "synced" || $videoStatus != "published" || $channelTransferStatus != "2" || $videoTransferStatus != "1" || $nrUnspentSupports != "1" ]]; then
echo "~~!!!~~~FAILED~~~!!!~~"
echo " Channel Status: $status "
echo " Video Status: $videoStatus "
echo " Channel Transfer Status: $channelTransferStatus "
echo " Video Transfer Status: $videoTransferStatus "
echo " Nr Unspent Supports: $nrUnspentSupports "
#docker-compose logs --tail="all" lbrycrd
#docker-compose logs --tail="all" walletserver
#docker-compose logs --tail="all" lbrynet
#docker-compose logs --tail="all" internalapis
exit 1;
else
echo "SUCCESSSSSSSSSSSSS!"
fi