Compare commits


8 commits

Author SHA1 Message Date
pseudoscalar
ce901f6b01 Add option to not wait for reflection. Add instructions for getting a YouTube API key. Fix some minor issues. 2021-11-17 10:07:20 -06:00
pseudoscalar
9f6b15e841 Clean up local cache after publishing stream 2021-11-05 11:22:20 -05:00
pseudoscalar
e564dc8445 Add dry-run option 2021-11-05 10:09:59 -05:00
pseudoscalar
eb30fa4299 Determine release time via YouTube API 2021-11-05 09:39:43 -05:00
pseudoscalar
2ba960ae01 Refactor to support future direction of development 2021-11-05 09:39:43 -05:00
pseudoscalar
8ea15afce8 Basic stream publishing. Still needs some work. 2021-11-05 09:39:43 -05:00
pseudoscalar
b9bf2f6e73 Download video to sync to local cache 2021-11-05 09:39:43 -05:00
Alex Grintsvayg
e554bbfe18 started work on local ytsync 2021-10-25 10:45:39 -04:00
37 changed files with 1728 additions and 1531 deletions

.gitignore (vendored, 4 changes)

@ -4,7 +4,3 @@ e2e/supporty/supporty
.env
blobsfiles
ytsync_docker
e2e/config.json
e2e/cookies.txt


@ -2,7 +2,7 @@ os: linux
dist: bionic
language: go
go:
- 1.17.x - 1.16.3
install: true


@ -1,111 +0,0 @@
From 30380338ba9af01696c94b61f0597131638eaec1 Mon Sep 17 00:00:00 2001
From: Niko Storni <niko@lbry.io>
Date: Mon, 16 Dec 2019 00:13:36 +0100
Subject: [PATCH] lbry-patch
---
youtube_dl/extractor/youtube.py | 45 +++++++++++++++++++++++++--------
1 file changed, 35 insertions(+), 10 deletions(-)
diff --git a/youtube_dl/extractor/youtube.py b/youtube_dl/extractor/youtube.py
index b913d07a6..cd66a5b01 100644
--- a/youtube_dl/extractor/youtube.py
+++ b/youtube_dl/extractor/youtube.py
@@ -10,6 +10,7 @@ import random
import re
import time
import traceback
+import subprocess
from .common import InfoExtractor, SearchInfoExtractor
from ..jsinterp import JSInterpreter
@@ -536,6 +537,9 @@ class YoutubeIE(YoutubeBaseInfoExtractor):
_GEO_BYPASS = False
+ _WGET_429_RATE_LIMIT = 8191
+ _WGET_BINARY = "wget"
+
IE_NAME = 'youtube'
_TESTS = [
{
@@ -1254,6 +1258,17 @@ class YoutubeIE(YoutubeBaseInfoExtractor):
""" Return a string representation of a signature """
return '.'.join(compat_str(len(part)) for part in example_sig.split('.'))
+ def _rate_limit_download(self, url, video_id, note=None):
+ if note is None:
+ self.report_download_webpage(video_id)
+ elif note is not False:
+ if video_id is None:
+ self.to_screen('%s' % (note,))
+ else:
+ self.to_screen('%s: %s' % (video_id, note))
+ source_address = self._downloader.params.get('source_address')
+ return subprocess.run([self._WGET_BINARY, '-q', '--limit-rate', str(self._WGET_429_RATE_LIMIT), '--bind-address', source_address, '-O', '-', url], check=True, stdout=subprocess.PIPE).stdout.decode(encoding='UTF-8')
+
def _extract_signature_function(self, video_id, player_url, example_sig):
id_m = re.match(
r'.*?-(?P<id>[a-zA-Z0-9_-]+)(?:/watch_as3|/html5player(?:-new)?|(?:/[a-z]{2,3}_[A-Z]{2})?/base)?\.(?P<ext>[a-z]+)$',
@@ -1678,7 +1693,7 @@ class YoutubeIE(YoutubeBaseInfoExtractor):
# Get video webpage
url = proto + '://www.youtube.com/watch?v=%s&gl=US&hl=en&has_verified=1&bpctr=9999999999' % video_id
- video_webpage = self._download_webpage(url, video_id)
+ video_webpage = self._rate_limit_download(url, video_id)
# Attempt to extract SWF player URL
mobj = re.search(r'swfConfig.*?"(https?:\\/\\/.*?watch.*?-.*?\.swf)"', video_webpage)
@@ -1736,10 +1751,9 @@ class YoutubeIE(YoutubeBaseInfoExtractor):
r'"sts"\s*:\s*(\d+)', embed_webpage, 'sts', default=''),
})
video_info_url = proto + '://www.youtube.com/get_video_info?' + data
- video_info_webpage = self._download_webpage(
+ video_info_webpage = self._rate_limit_download(
video_info_url, video_id,
- note='Refetching age-gated info webpage',
- errnote='unable to download video info webpage')
+ note='Refetching age-gated info webpage')
video_info = compat_parse_qs(video_info_webpage)
pl_response = video_info.get('player_response', [None])[0]
player_response = extract_player_response(pl_response, video_id)
@@ -1777,7 +1791,7 @@ class YoutubeIE(YoutubeBaseInfoExtractor):
# The general idea is to take a union of itags of both DASH manifests (for example
# video with such 'manifest behavior' see https://github.com/ytdl-org/youtube-dl/issues/6093)
self.report_video_info_webpage_download(video_id)
- for el in ('embedded', 'detailpage', 'vevo', ''):
+ for el in ('', 'embedded', 'detailpage', 'vevo'):
query = {
'video_id': video_id,
'ps': 'default',
@@ -1789,11 +1803,22 @@ class YoutubeIE(YoutubeBaseInfoExtractor):
query['el'] = el
if sts:
query['sts'] = sts
- video_info_webpage = self._download_webpage(
- '%s://www.youtube.com/get_video_info' % proto,
- video_id, note=False,
- errnote='unable to download video info webpage',
- fatal=False, query=query)
+
+ if el == '':
+ base_url = 'https://youtube.com/get_video_info?video_id={}'.format(video_id)
+ else:
+ base_url = 'https://youtube.com/get_video_info'
+
+ for q in query:
+ if q is None or q is "":
+ continue
+ if query[q] is None or query[q] is "":
+ continue
+
+ base_url = base_url + "?{}={}".format(q, query[q])
+
+ video_info_webpage = self._rate_limit_download(base_url, video_id)
+
if not video_info_webpage:
continue
get_video_info = compat_parse_qs(video_info_webpage)
--
2.17.1


@ -8,17 +8,23 @@ With the support of said database, the tool is also able to keep all the channel
# Requirements
- lbrynet SDK https://github.com/lbryio/lbry-sdk/releases (We strive to keep the latest release of ytsync compatible with the latest major release of the SDK) - lbrynet SDK https://github.com/lbryio/lbry/releases (We strive to keep the latest release of ytsync compatible with the latest major release of the SDK)
- a lbrycrd node running (localhost or on a remote machine) with credits in it
- internal-apis (you cannot run this one yourself)
- python3-pip
- yt-dlp (`pip3 install -U yt-dlp`)
- ffmpeg (latest)
# Setup
- make sure daemon is stopped and can be controlled through `systemctl` (find example below)
- extract the ytsync binary anywhere
- create and fill `config.json` using [this example](config.json.example) - add the environment variables necessary to the tool
- export SLACK_TOKEN="a-token-to-spam-your-slack"
- export SLACK_CHANNEL="youtube-status"
- export YOUTUBE_API_KEY="youtube-api-key"
- export LBRY_WEB_API="https://lbry-api-url-here"
- export LBRY_API_TOKEN="internal-apis-token-for-ytsync-user"
- export LBRYCRD_STRING="tcp://user:password@host:5429"
- export AWS_S3_ID="THE-ID-LIES-HERE"
- export AWS_S3_SECRET="THE-SECRET-LIES-HERE"
- export AWS_S3_REGION="us-east-1"
- export AWS_S3_BUCKET="ytsync-wallets"
## systemd script example
`/etc/systemd/system/lbrynet.service`
@ -49,26 +55,23 @@ Usage:
Flags:
--after int Specify from when to pull jobs [Unix time](Default: 0)
--before int Specify until when to pull jobs [Unix time](Default: current Unix time) (default 1669311891) --before int Specify until when to pull jobs [Unix time](Default: current Unix time) (default current timestamp)
--channelID string If specified, only this channel will be synced.
--concurrent-jobs int how many jobs to process concurrently (default 1)
-h, --help help for ytsync
--limit int limit the amount of channels to sync
--max-length int Maximum video length to process (in hours) (default 2) --max-length float Maximum video length to process (in hours) (default 2)
--max-size int Maximum video size to process (in MB) (default 2048)
--max-tries int Number of times to try a publish that fails (default 3)
--no-transfers Skips the transferring process of videos, channels and supports
--quick Look up only the last 50 videos from youtube
--remove-db-unpublished Remove videos from the database that are marked as published but aren't really published
--run-once Whether the process should be stopped after one cycle or not
--skip-space-check Do not perform free space check on startup
--status string Specify which queue to pull from. Overrides --update
--status2 string Specify which secondary queue to pull from. --stop-on-error If a publish fails, stop all publishing and exit
--takeover-existing-channel If channel exists and we don't own it, take over the channel
--update Update previously synced channels instead of syncing new ones
--upgrade-metadata Upgrade videos if they're on the old metadata version
--videos-limit int how many videos to process per channel (leave 0 for automatic detection) --videos-limit int how many videos to process per channel (default 1000)
```
## Running from Source
@ -85,17 +88,17 @@ Contributions to this project are welcome, encouraged, and compensated. For more
## Security
We take security seriously. Please contact [security@lbry.io](mailto:security@lbry.io) regarding any security issues. Our PGP key is [here](https://lbry.com/faq/pgp-key) if you need it. We take security seriously. Please contact [security@lbry.io](mailto:security@lbry.io) regarding any security issues. Our PGP key is [here](https://keybase.io/lbry/key.asc) if you need it.
## Contact
The primary contact for this project is [Niko Storni](https://github.com/nikooo777) (niko@lbry.com). The primary contact for this project is [Niko Storni](https://github.com/nikooo777) (niko@lbry.io).
## Additional Info and Links
- [https://lbry.com](https://lbry.com) - The live LBRY website - [https://lbry.io](https://lbry.io) - The live LBRY website
- [Discord Chat](https://chat.lbry.com) - A chat room for the LBRYians - [Discord Chat](https://chat.lbry.io) - A chat room for the LBRYians
- [Email us](mailto:hello@lbry.com) - LBRY Support email - [Email us](mailto:hello@lbry.io) - LBRY Support email
- [Twitter](https://twitter.com/@lbryio) - LBRY Twitter page
- [Facebook](https://www.facebook.com/lbryio/) - LBRY Facebook page
- [Reddit](https://reddit.com/r/lbry) - LBRY Reddit page

cmd/local.go (new file, 7 changes)

@ -0,0 +1,7 @@
package cmd
import "github.com/lbryio/ytsync/v5/local"
func init() {
local.AddCommand(rootCmd)
}

cmd/root.go (new file, 166 changes)

@ -0,0 +1,166 @@
package cmd
import (
"os"
"time"
"github.com/lbryio/ytsync/v5/manager"
"github.com/lbryio/ytsync/v5/sdk"
"github.com/lbryio/ytsync/v5/shared"
ytUtils "github.com/lbryio/ytsync/v5/util"
"github.com/lbryio/lbry.go/v2/extras/errors"
"github.com/lbryio/lbry.go/v2/extras/util"
log "github.com/sirupsen/logrus"
"github.com/spf13/cobra"
)
const defaultMaxTries = 3
var (
cliFlags shared.SyncFlags
maxVideoLength int
)
var rootCmd = &cobra.Command{
Use: "ytsync",
Short: "Publish youtube channels into LBRY network automatically.",
Run: ytSync,
Args: cobra.RangeArgs(0, 0),
}
func init() {
rootCmd.Flags().IntVar(&cliFlags.MaxTries, "max-tries", defaultMaxTries, "Number of times to try a publish that fails")
rootCmd.Flags().BoolVar(&cliFlags.TakeOverExistingChannel, "takeover-existing-channel", false, "If channel exists and we don't own it, take over the channel")
rootCmd.Flags().IntVar(&cliFlags.Limit, "limit", 0, "limit the amount of channels to sync")
rootCmd.Flags().BoolVar(&cliFlags.SkipSpaceCheck, "skip-space-check", false, "Do not perform free space check on startup")
rootCmd.Flags().BoolVar(&cliFlags.SyncUpdate, "update", false, "Update previously synced channels instead of syncing new ones")
rootCmd.Flags().BoolVar(&cliFlags.SingleRun, "run-once", false, "Whether the process should be stopped after one cycle or not")
rootCmd.Flags().BoolVar(&cliFlags.RemoveDBUnpublished, "remove-db-unpublished", false, "Remove videos from the database that are marked as published but aren't really published")
rootCmd.Flags().BoolVar(&cliFlags.UpgradeMetadata, "upgrade-metadata", false, "Upgrade videos if they're on the old metadata version")
rootCmd.Flags().BoolVar(&cliFlags.DisableTransfers, "no-transfers", false, "Skips the transferring process of videos, channels and supports")
rootCmd.Flags().BoolVar(&cliFlags.QuickSync, "quick", false, "Look up only the last 50 videos from youtube")
rootCmd.Flags().StringVar(&cliFlags.Status, "status", "", "Specify which queue to pull from. Overrides --update")
rootCmd.Flags().StringVar(&cliFlags.SecondaryStatus, "status2", "", "Specify which secondary queue to pull from.")
rootCmd.Flags().StringVar(&cliFlags.ChannelID, "channelID", "", "If specified, only this channel will be synced.")
rootCmd.Flags().Int64Var(&cliFlags.SyncFrom, "after", time.Unix(0, 0).Unix(), "Specify from when to pull jobs [Unix time](Default: 0)")
rootCmd.Flags().Int64Var(&cliFlags.SyncUntil, "before", time.Now().AddDate(1, 0, 0).Unix(), "Specify until when to pull jobs [Unix time](Default: current Unix time)")
rootCmd.Flags().IntVar(&cliFlags.ConcurrentJobs, "concurrent-jobs", 1, "how many jobs to process concurrently")
rootCmd.Flags().IntVar(&cliFlags.VideosLimit, "videos-limit", 0, "how many videos to process per channel (leave 0 for automatic detection)")
rootCmd.Flags().IntVar(&cliFlags.MaxVideoSize, "max-size", 2048, "Maximum video size to process (in MB)")
rootCmd.Flags().IntVar(&maxVideoLength, "max-length", 2, "Maximum video length to process (in hours)")
}
func Execute() {
err := rootCmd.Execute()
if err != nil {
log.Errorln(err)
os.Exit(1)
}
}
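// ytSync validates the CLI flags and required environment variables, builds the
// API, AWS, and lbrycrd configuration, and then hands control to the sync manager.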
func ytSync(cmd *cobra.Command, args []string) {
var hostname string
slackToken := os.Getenv("SLACK_TOKEN")
if slackToken == "" {
log.Error("A slack token was not present in env vars! Slack messages disabled!")
} else {
var err error
hostname, err = os.Hostname()
if err != nil {
log.Error("could not detect system hostname")
hostname = "ytsync-unknown"
}
if len(hostname) > 30 {
hostname = hostname[0:30]
}
util.InitSlack(os.Getenv("SLACK_TOKEN"), os.Getenv("SLACK_CHANNEL"), hostname)
}
if cliFlags.Status != "" && !util.InSlice(cliFlags.Status, shared.SyncStatuses) {
log.Errorf("status must be one of the following: %v\n", shared.SyncStatuses)
return
}
if cliFlags.MaxTries < 1 {
log.Errorln("setting --max-tries less than 1 doesn't make sense")
return
}
if cliFlags.Limit < 0 {
log.Errorln("setting --limit less than 0 (unlimited) doesn't make sense")
return
}
cliFlags.MaxVideoLength = time.Duration(maxVideoLength) * time.Hour
apiURL := os.Getenv("LBRY_WEB_API")
apiToken := os.Getenv("LBRY_API_TOKEN")
youtubeAPIKey := os.Getenv("YOUTUBE_API_KEY")
lbrycrdDsn := os.Getenv("LBRYCRD_STRING")
awsS3ID := os.Getenv("AWS_S3_ID")
awsS3Secret := os.Getenv("AWS_S3_SECRET")
awsS3Region := os.Getenv("AWS_S3_REGION")
awsS3Bucket := os.Getenv("AWS_S3_BUCKET")
if apiURL == "" {
log.Errorln("An API URL was not defined. Please set the environment variable LBRY_WEB_API")
return
}
if apiToken == "" {
log.Errorln("An API Token was not defined. Please set the environment variable LBRY_API_TOKEN")
return
}
if youtubeAPIKey == "" {
log.Errorln("A Youtube API key was not defined. Please set the environment variable YOUTUBE_API_KEY")
return
}
if awsS3ID == "" {
log.Errorln("AWS S3 ID credentials were not defined. Please set the environment variable AWS_S3_ID")
return
}
if awsS3Secret == "" {
log.Errorln("AWS S3 Secret credentials were not defined. Please set the environment variable AWS_S3_SECRET")
return
}
if awsS3Region == "" {
log.Errorln("AWS S3 Region was not defined. Please set the environment variable AWS_S3_REGION")
return
}
if awsS3Bucket == "" {
log.Errorln("AWS S3 Bucket was not defined. Please set the environment variable AWS_S3_BUCKET")
return
}
if lbrycrdDsn == "" {
log.Infoln("Using default (local) lbrycrd instance. Set LBRYCRD_STRING if you want to use something else")
}
blobsDir := ytUtils.GetBlobsDir()
apiConfig := &sdk.APIConfig{
YoutubeAPIKey: youtubeAPIKey,
ApiURL: apiURL,
ApiToken: apiToken,
HostName: hostname,
}
awsConfig := &shared.AwsConfigs{
AwsS3ID: awsS3ID,
AwsS3Secret: awsS3Secret,
AwsS3Region: awsS3Region,
AwsS3Bucket: awsS3Bucket,
}
sm := manager.NewSyncManager(
cliFlags,
blobsDir,
lbrycrdDsn,
awsConfig,
apiConfig,
)
err := sm.Start()
if err != nil {
ytUtils.SendErrorToSlack(errors.FullTrace(err))
}
ytUtils.SendInfoToSlack("Syncing process terminated!")
}


@ -1,35 +0,0 @@
{
"slack_token": "",
"slack_channel": "ytsync-dev",
"internal_apis_endpoint": "http://localhost:15400",
"internal_apis_auth_token": "ytsyntoken",
"lbrycrd_string": "tcp://lbry:lbry@localhost:15200",
"wallet_s3_config": {
"id": "",
"secret": "",
"region": "us-east-1",
"bucket": "ytsync-wallets",
"endpoint": ""
},
"blockchaindb_s3_config": {
"id": "",
"secret": "",
"region": "us-east-1",
"bucket": "blockchaindbs",
"endpoint": ""
},
"thumbnails_s3_config": {
"id": "",
"secret": "",
"region": "us-east-1",
"bucket": "thumbnails.lbry.com",
"endpoint": ""
},
"aws_thumbnails_s3_config": {
"id": "",
"secret": "",
"region": "us-east-1",
"bucket": "thumbnails.lbry.com",
"endpoint": ""
}
}


@ -1,75 +0,0 @@
package configs
import (
"os"
"regexp"
"github.com/lbryio/lbry.go/v2/extras/errors"
"github.com/aws/aws-sdk-go/aws"
"github.com/aws/aws-sdk-go/aws/credentials"
log "github.com/sirupsen/logrus"
"github.com/tkanos/gonfig"
)
type S3Configs struct {
ID string `json:"id"`
Secret string `json:"secret"`
Region string `json:"region"`
Bucket string `json:"bucket"`
Endpoint string `json:"endpoint"`
}
type Configs struct {
SlackToken string `json:"slack_token"`
SlackChannel string `json:"slack_channel"`
InternalApisEndpoint string `json:"internal_apis_endpoint"`
InternalApisAuthToken string `json:"internal_apis_auth_token"`
LbrycrdString string `json:"lbrycrd_string"`
WalletS3Config S3Configs `json:"wallet_s3_config"`
BlockchaindbS3Config S3Configs `json:"blockchaindb_s3_config"`
AWSThumbnailsS3Config S3Configs `json:"aws_thumbnails_s3_config"`
ThumbnailsS3Config S3Configs `json:"thumbnails_s3_config"`
}
var Configuration *Configs
func Init(configPath string) error {
if Configuration != nil {
return nil
}
c := Configs{}
err := gonfig.GetConf(configPath, &c)
if err != nil {
return errors.Err(err)
}
Configuration = &c
return nil
}
func (s *S3Configs) GetS3AWSConfig() *aws.Config {
return &aws.Config{
Credentials: credentials.NewStaticCredentials(s.ID, s.Secret, ""),
Region: &s.Region,
Endpoint: &s.Endpoint,
S3ForcePathStyle: aws.Bool(true),
}
}
func (c *Configs) GetHostname() string {
var hostname string
var err error
hostname, err = os.Hostname()
if err != nil {
log.Error("could not detect system hostname")
hostname = "ytsync_unknown"
}
reg, err := regexp.Compile("[^a-zA-Z0-9_]+")
if err == nil {
hostname = reg.ReplaceAllString(hostname, "_")
}
if len(hostname) > 30 {
hostname = hostname[0:30]
}
return hostname
}


@ -36,9 +36,7 @@ func GetPlaylistVideoIDs(channelName string, maxVideos int, stopChan stop.Chan,
} }
videoIDs := make([]string, 0, maxVideos) videoIDs := make([]string, 0, maxVideos)
for i, v := range ids { for i, v := range ids {
if v == "" { logrus.Debugf("%d - video id %s", i, v)
continue
}
if i >= maxVideos { if i >= maxVideos {
break break
} }
@ -49,7 +47,7 @@ func GetPlaylistVideoIDs(channelName string, maxVideos int, stopChan stop.Chan,
const releaseTimeFormat = "2006-01-02, 15:04:05 (MST)" const releaseTimeFormat = "2006-01-02, 15:04:05 (MST)"
func GetVideoInformation(videoID string, stopChan stop.Chan, pool *ip_manager.IPPool) (*ytdl.YtdlVideo, error) { func GetVideoInformation(config *sdk.APIConfig, videoID string, stopChan stop.Chan, ip *net.TCPAddr, pool *ip_manager.IPPool) (*ytdl.YtdlVideo, error) {
args := []string{ args := []string{
"--skip-download", "--skip-download",
"--write-info-json", "--write-info-json",
@ -79,6 +77,50 @@ func GetVideoInformation(videoID string, stopChan stop.Chan, pool *ip_manager.IP
return nil, errors.Err(err) return nil, errors.Err(err)
} }
// now get an accurate time
const maxTries = 5
tries := 0
GetTime:
tries++
t, err := getUploadTime(config, videoID, ip, video.UploadDate)
if err != nil {
//slack(":warning: Upload time error: %v", err)
if tries <= maxTries && (errors.Is(err, errNotScraped) || errors.Is(err, errUploadTimeEmpty) || errors.Is(err, errStatusParse) || errors.Is(err, errConnectionIssue)) {
err := triggerScrape(videoID, ip)
if err == nil {
time.Sleep(2 * time.Second) // let them scrape it
goto GetTime
} else {
//slack("triggering scrape returned error: %v", err)
}
} else if !errors.Is(err, errNotScraped) && !errors.Is(err, errUploadTimeEmpty) {
//slack(":warning: Error while trying to get accurate upload time for %s: %v", videoID, err)
if t == "" {
return nil, errors.Err(err)
} else {
t = "" //TODO: get rid of the other piece below?
}
}
// do fallback below
}
//slack("After all that, upload time for %s is %s", videoID, t)
if t != "" {
parsed, err := time.Parse("2006-01-02, 15:04:05 (MST)", t) // this will probably be UTC, but Go's timezone parsing is fucked up. it ignores the timezone in the date
if err != nil {
return nil, errors.Err(err)
}
//slack(":exclamation: Got an accurate time for %s", videoID)
video.UploadDateForReal = parsed
} else { //TODO: this is the piece that isn't needed!
slack(":warning: Could not get accurate time for %s. Falling back to time from upload ytdl: %s.", videoID, video.UploadDate)
// fall back to UploadDate from youtube-dl
video.UploadDateForReal, err = time.Parse("20060102", video.UploadDate)
if err != nil {
return nil, err
}
}
return video, nil return video, nil
} }
@ -168,8 +210,46 @@ func getUploadTime(config *sdk.APIConfig, videoID string, ip *net.TCPAddr, uploa
} }
} }
if time.Now().AddDate(0, 0, -3).After(ytdlUploadDate) {
return ytdlUploadDate.Format(releaseTimeFormat), nil return ytdlUploadDate.Format(releaseTimeFormat), nil
} }
client := getClient(ip)
req, err := http.NewRequest(http.MethodGet, "https://caa.iti.gr/get_verificationV3?url=https://www.youtube.com/watch?v="+videoID, nil)
if err != nil {
return ytdlUploadDate.Format(releaseTimeFormat), errors.Err(err)
}
req.Header.Set("User-Agent", ChromeUA)
res, err := client.Do(req)
if err != nil {
return ytdlUploadDate.Format(releaseTimeFormat), errors.Err(err)
}
defer res.Body.Close()
var uploadTime struct {
Time string `json:"video_upload_time"`
Message string `json:"message"`
Status string `json:"status"`
}
err = json.NewDecoder(res.Body).Decode(&uploadTime)
if err != nil {
return ytdlUploadDate.Format(releaseTimeFormat), errors.Err(err)
}
if uploadTime.Status == "ERROR1" {
return ytdlUploadDate.Format(releaseTimeFormat), errNotScraped
}
if uploadTime.Status == "" && strings.HasPrefix(uploadTime.Message, "CANNOT_RETRIEVE_REPORT_FOR_VIDEO_") {
return ytdlUploadDate.Format(releaseTimeFormat), errors.Err("cannot retrieve report for video")
}
if uploadTime.Time == "" {
return ytdlUploadDate.Format(releaseTimeFormat), errUploadTimeEmpty
}
return uploadTime.Time, nil
}
func getClient(ip *net.TCPAddr) *http.Client { func getClient(ip *net.TCPAddr) *http.Client {
if ip == nil { if ip == nil {
@ -194,7 +274,7 @@ func getClient(ip *net.TCPAddr) *http.Client {
const ( const (
GoogleBotUA = "Mozilla/5.0 (compatible; Googlebot/2.1; +http://www.google.com/bot.html)" GoogleBotUA = "Mozilla/5.0 (compatible; Googlebot/2.1; +http://www.google.com/bot.html)"
ChromeUA = "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/104.0.0.0 Safari/537.36" ChromeUA = "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/91.0.4472.77 Safari/537.36"
maxAttempts = 3 maxAttempts = 3
extractionError = "YouTube said: Unable to extract video data" extractionError = "YouTube said: Unable to extract video data"
throttledError = "HTTP Error 429" throttledError = "HTTP Error 429"
@ -285,8 +365,7 @@ func runCmd(cmd *exec.Cmd, stopChan stop.Chan) ([]string, error) {
return nil, errors.Err("interrupted by user") return nil, errors.Err("interrupted by user")
case err := <-done: case err := <-done:
if err != nil { if err != nil {
//return nil, errors.Prefix("yt-dlp "+strings.Join(cmd.Args, " ")+" ["+string(errorLog)+"]", err) return nil, errors.Prefix("yt-dlp "+strings.Join(cmd.Args, " ")+" ["+string(errorLog)+"]", err)
return nil, errors.Prefix(string(errorLog), err)
} }
return strings.Split(strings.Replace(string(outLog), "\r\n", "\n", -1), "\n"), nil return strings.Split(strings.Replace(string(outLog), "\r\n", "\n", -1), "\n"), nil
} }


@ -3,11 +3,7 @@ package downloader
import ( import (
"testing" "testing"
"github.com/lbryio/ytsync/v5/ip_manager"
"github.com/lbryio/ytsync/v5/sdk" "github.com/lbryio/ytsync/v5/sdk"
"github.com/lbryio/lbry.go/v2/extras/stop"
"github.com/sirupsen/logrus" "github.com/sirupsen/logrus"
"github.com/stretchr/testify/assert" "github.com/stretchr/testify/assert"
) )
@ -23,18 +19,24 @@ func TestGetPlaylistVideoIDs(t *testing.T) {
} }
func TestGetVideoInformation(t *testing.T) { func TestGetVideoInformation(t *testing.T) {
s := stop.New() video, err := GetVideoInformation(nil, "zj7pXM9gE5M", nil, nil, nil)
ip, err := ip_manager.GetIPPool(s) if err != nil {
assert.NoError(t, err) logrus.Error(err)
video, err := GetVideoInformation("kDGOHNpRjzc", s.Ch(), ip) }
assert.NoError(t, err) if video != nil {
assert.NotNil(t, video)
logrus.Info(video.ID) logrus.Info(video.ID)
} }
}
func Test_getUploadTime(t *testing.T) { func Test_getUploadTime(t *testing.T) {
configs := sdk.APIConfig{} configs := sdk.APIConfig{
YoutubeAPIKey: "",
ApiURL: "https://api.lbry.com",
ApiToken: "Ht4NETrL5oWKyAaZkuSV68BKhtXkiLh5",
HostName: "test",
}
got, err := getUploadTime(&configs, "kDGOHNpRjzc", nil, "20060102") got, err := getUploadTime(&configs, "kDGOHNpRjzc", nil, "20060102")
assert.NoError(t, err) assert.NoError(t, err)
t.Log(got) t.Log(got)
} }


@ -2,136 +2,149 @@ package ytdl
import ( import (
"time" "time"
"github.com/lbryio/ytsync/v5/sdk"
"github.com/sirupsen/logrus"
) )
type YtdlVideo struct { type YtdlVideo struct {
ID string `json:"id"`
Title string `json:"title"`
Thumbnails []Thumbnail `json:"thumbnails"`
Description string `json:"description"`
ChannelID string `json:"channel_id"`
Duration int `json:"duration"`
Categories []string `json:"categories"`
Tags []string `json:"tags"`
IsLive bool `json:"is_live"`
LiveStatus string `json:"live_status"`
ReleaseTimestamp *int64 `json:"release_timestamp"`
uploadDateForReal *time.Time
Availability string `json:"availability"`
ReleaseDate string `json:"release_date"`
UploadDate string `json:"upload_date"` UploadDate string `json:"upload_date"`
UploadDateForReal time.Time // you need to manually set this since the value in the API doesn't include the time
Extractor string `json:"extractor"`
Series interface{} `json:"series"`
Format string `json:"format"`
Vbr interface{} `json:"vbr"`
Chapters interface{} `json:"chapters"`
Height int `json:"height"`
LikeCount interface{} `json:"like_count"`
Duration int `json:"duration"`
Fulltitle string `json:"fulltitle"`
PlaylistIndex interface{} `json:"playlist_index"`
Album interface{} `json:"album"`
ViewCount int `json:"view_count"`
Playlist interface{} `json:"playlist"`
Title string `json:"title"`
Filename string `json:"_filename"`
Creator interface{} `json:"creator"`
Ext string `json:"ext"`
ID string `json:"id"`
DislikeCount interface{} `json:"dislike_count"`
AverageRating float64 `json:"average_rating"`
Abr float64 `json:"abr"`
UploaderURL string `json:"uploader_url"`
Categories []string `json:"categories"`
Fps float64 `json:"fps"`
StretchedRatio interface{} `json:"stretched_ratio"`
SeasonNumber interface{} `json:"season_number"`
Annotations interface{} `json:"annotations"`
WebpageURLBasename string `json:"webpage_url_basename"`
Acodec string `json:"acodec"`
DisplayID string `json:"display_id"`
//RequestedFormats []RequestedFormat `json:"requested_formats"`
//AutomaticCaptions struct{} `json:"automatic_captions"`
Description string `json:"description"`
Tags []string `json:"tags"`
Track interface{} `json:"track"`
RequestedSubtitles interface{} `json:"requested_subtitles"`
StartTime interface{} `json:"start_time"`
Uploader string `json:"uploader"`
ExtractorKey string `json:"extractor_key"`
FormatID string `json:"format_id"`
EpisodeNumber interface{} `json:"episode_number"`
UploaderID string `json:"uploader_id"`
//Subtitles struct{} `json:"subtitles"`
ReleaseYear interface{} `json:"release_year"`
Thumbnails []Thumbnail `json:"thumbnails"`
License interface{} `json:"license"`
Artist interface{} `json:"artist"`
AgeLimit int `json:"age_limit"`
ReleaseDate interface{} `json:"release_date"`
AltTitle interface{} `json:"alt_title"`
Thumbnail string `json:"thumbnail"`
ChannelID string `json:"channel_id"`
IsLive interface{} `json:"is_live"`
Width int `json:"width"`
EndTime interface{} `json:"end_time"`
WebpageURL string `json:"webpage_url"`
Formats []Format `json:"formats"`
ChannelURL string `json:"channel_url"`
Resolution interface{} `json:"resolution"`
Vcodec string `json:"vcodec"`
}
//WasLive bool `json:"was_live"` type RequestedFormat struct {
//Formats interface{} `json:"formats"` Asr interface{} `json:"asr"`
//Thumbnail string `json:"thumbnail"` Tbr float64 `json:"tbr"`
//Uploader string `json:"uploader"` Container string `json:"container"`
//UploaderID string `json:"uploader_id"` Language interface{} `json:"language"`
//UploaderURL string `json:"uploader_url"` Format string `json:"format"`
//ChannelURL string `json:"channel_url"` URL string `json:"url"`
//ViewCount int `json:"view_count"` Vcodec string `json:"vcodec"`
//AverageRating interface{} `json:"average_rating"` FormatNote string `json:"format_note"`
//AgeLimit int `json:"age_limit"` Height int `json:"height"`
//WebpageURL string `json:"webpage_url"` Width int `json:"width"`
//PlayableInEmbed bool `json:"playable_in_embed"` Ext string `json:"ext"`
//AutomaticCaptions interface{} `json:"automatic_captions"` FragmentBaseURL string `json:"fragment_base_url"`
//Subtitles interface{} `json:"subtitles"` Filesize interface{} `json:"filesize"`
//Chapters interface{} `json:"chapters"` Fps float64 `json:"fps"`
//LikeCount int `json:"like_count"` ManifestURL string `json:"manifest_url"`
//Channel string `json:"channel"` Protocol string `json:"protocol"`
//ChannelFollowerCount int `json:"channel_follower_count"` FormatID string `json:"format_id"`
//OriginalURL string `json:"original_url"` HTTPHeaders struct {
//WebpageURLBasename string `json:"webpage_url_basename"` AcceptCharset string `json:"Accept-Charset"`
//WebpageURLDomain string `json:"webpage_url_domain"` AcceptLanguage string `json:"Accept-Language"`
//Extractor string `json:"extractor"` AcceptEncoding string `json:"Accept-Encoding"`
//ExtractorKey string `json:"extractor_key"` Accept string `json:"Accept"`
//Playlist interface{} `json:"playlist"` UserAgent string `json:"User-Agent"`
//PlaylistIndex interface{} `json:"playlist_index"` } `json:"http_headers"`
//DisplayID string `json:"display_id"` Fragments []struct {
//Fulltitle string `json:"fulltitle"` Path string `json:"path"`
//DurationString string `json:"duration_string"` Duration float64 `json:"duration,omitempty"`
//RequestedSubtitles interface{} `json:"requested_subtitles"` } `json:"fragments"`
//HasDrm bool `json:"__has_drm"` Acodec string `json:"acodec"`
//RequestedFormats interface{} `json:"requested_formats"` Abr int `json:"abr,omitempty"`
//Format string `json:"format"` }
//FormatID string `json:"format_id"`
//Ext string `json:"ext"` type Format struct {
//Protocol string `json:"protocol"` Asr int `json:"asr"`
//Language interface{} `json:"language"` Filesize int `json:"filesize"`
//FormatNote string `json:"format_note"` FormatID string `json:"format_id"`
//FilesizeApprox int `json:"filesize_approx"` FormatNote string `json:"format_note"`
//Tbr float64 `json:"tbr"` Fps interface{} `json:"fps"`
//Width int `json:"width"` Height interface{} `json:"height"`
//Height int `json:"height"` Quality int `json:"quality"`
//Resolution string `json:"resolution"` Tbr float64 `json:"tbr"`
//Fps int `json:"fps"` URL string `json:"url"`
//DynamicRange string `json:"dynamic_range"` Width interface{} `json:"width"`
//Vcodec string `json:"vcodec"` Ext string `json:"ext"`
//Vbr float64 `json:"vbr"` Vcodec string `json:"vcodec"`
//StretchedRatio interface{} `json:"stretched_ratio"` Acodec string `json:"acodec"`
//Acodec string `json:"acodec"` Abr float64 `json:"abr,omitempty"`
//Abr float64 `json:"abr"` DownloaderOptions struct {
//Asr int `json:"asr"` HTTPChunkSize int `json:"http_chunk_size"`
//Epoch int `json:"epoch"` } `json:"downloader_options,omitempty"`
//Filename string `json:"filename"` Container string `json:"container,omitempty"`
//Urls string `json:"urls"` Format string `json:"format"`
//Type string `json:"_type"` Protocol string `json:"protocol"`
HTTPHeaders struct {
UserAgent string `json:"User-Agent"`
AcceptCharset string `json:"Accept-Charset"`
Accept string `json:"Accept"`
AcceptEncoding string `json:"Accept-Encoding"`
AcceptLanguage string `json:"Accept-Language"`
} `json:"http_headers"`
Vbr float64 `json:"vbr,omitempty"`
} }
type Thumbnail struct { type Thumbnail struct {
URL string `json:"url"` URL string `json:"url"`
Preference int `json:"preference"` Width int `json:"width"`
Resolution string `json:"resolution"`
ID string `json:"id"` ID string `json:"id"`
Height int `json:"height,omitempty"` Height int `json:"height"`
Width int `json:"width,omitempty"`
Resolution string `json:"resolution,omitempty"`
} }
func (v *YtdlVideo) GetUploadTime() time.Time { type HTTPHeaders struct {
//priority list: AcceptCharset string `json:"Accept-Charset"`
// release timestamp from yt AcceptLanguage string `json:"Accept-Language"`
// release timestamp from morty AcceptEncoding string `json:"Accept-Encoding"`
// release date from yt Accept string `json:"Accept"`
// upload date from yt UserAgent string `json:"User-Agent"`
if v.uploadDateForReal != nil {
return *v.uploadDateForReal
}
var ytdlReleaseTimestamp time.Time
if v.ReleaseTimestamp != nil && *v.ReleaseTimestamp > 0 {
ytdlReleaseTimestamp = time.Unix(*v.ReleaseTimestamp, 0).UTC()
}
//get morty timestamp
var mortyReleaseTimestamp time.Time
mortyRelease, err := sdk.GetAPIsConfigs().GetReleasedDate(v.ID)
if err != nil {
logrus.Error(err)
} else if mortyRelease != nil {
mortyReleaseTimestamp, err = time.ParseInLocation(time.RFC3339, mortyRelease.ReleaseTime, time.UTC)
if err != nil {
logrus.Error(err)
}
}
ytdlReleaseDate, err := time.Parse("20060102", v.ReleaseDate)
if err != nil {
logrus.Error(err)
}
ytdlUploadDate, err := time.Parse("20060102", v.UploadDate)
if err != nil {
logrus.Error(err)
}
if !ytdlReleaseTimestamp.IsZero() {
v.uploadDateForReal = &ytdlReleaseTimestamp
} else if !mortyReleaseTimestamp.IsZero() {
v.uploadDateForReal = &mortyReleaseTimestamp
} else if !ytdlReleaseDate.IsZero() {
v.uploadDateForReal = &ytdlReleaseDate
} else {
v.uploadDateForReal = &ytdlUploadDate
}
return *v.uploadDateForReal
} }


@ -21,7 +21,7 @@ services:
## Wallet Server ##
###################
walletserver:
image: lbry/wallet-server:v0.101.1 image: lbry/wallet-server:latest-release
restart: always
environment:
- DB_DIRECTORY=/database
@ -81,7 +81,6 @@ services:
- walletserver
environment:
- LBRY_STREAMING_SERVER=0.0.0.0:5280
- LBRY_FEE_PER_NAME_CHAR=0
volumes:
- "./persist/.lbrynet:/home/lbrynet"
- ".:/etc/lbry" #Put your daemon_settings.yml here
@ -110,7 +109,7 @@ services:
## Internal APIs ##
###################
internalapis:
image: odyseeteam/internal-apis:master image: lbry/internal-apis:master
restart: "no"
ports:
- "15400:8080"
@ -128,7 +127,7 @@ services:
## Chainquery ##
################
chainquery:
image: odyseeteam/chainquery:master image: lbry/chainquery:master
restart: "no"
ports:
- 6300:6300


@ -14,8 +14,11 @@ export LOCAL_TMP_DIR="/var/tmp:/var/tmp"
touch -a .env && set -o allexport; source ./.env; set +o allexport
echo "LOCAL_TMP_DIR=$LOCAL_TMP_DIR"
# Compose settings - docker only
export SLACK_CHANNEL="ytsync-travis"
export LBRY_API_TOKEN="ytsyntoken"
export LBRY_WEB_API="http://localhost:15400"
export LBRYNET_ADDRESS="http://localhost:15100"
export LBRYCRD_STRING="tcp://lbry:lbry@localhost:15200" #required for supporty export LBRYCRD_STRING="tcp://lbry:lbry@localhost:15200"
export LBRYNET_USE_DOCKER=true
export REFLECT_BLOBS=false
export CLEAN_ON_STARTUP=true
@ -47,9 +50,9 @@ until curl --output /dev/null --silent --head --fail http://localhost:15400; do
done
echo "successfully started..."
channelToSync="UCMn-zv1SE-2y6vyewscfFqw" channelToSync="UCGyoEsIRjmnmzrsB67DhrOA"
channelName=@whatever"$(date +%s)" channelName=@Alaminemoh11"$(date +%s)"
latestVideoID="yPJgjiMbmX0" latestVideoID="ejWF7Jjdgmc"
#Data Setup for test
./data_setup.sh "$channelName" "$channelToSync" "$latestVideoID"

go.mod (167 changes)

@ -1,5 +1,3 @@
go 1.17
module github.com/lbryio/ytsync/v5 module github.com/lbryio/ytsync/v5
replace github.com/btcsuite/btcd => github.com/lbryio/lbrycrd.go v0.0.0-20200203050410-e1076f12bf19 replace github.com/btcsuite/btcd => github.com/lbryio/lbrycrd.go v0.0.0-20200203050410-e1076f12bf19
@ -8,144 +6,37 @@ replace github.com/btcsuite/btcd => github.com/lbryio/lbrycrd.go v0.0.0-20200203
//replace github.com/lbryio/reflector.go => /home/niko/go/src/github.com/lbryio/reflector.go/ //replace github.com/lbryio/reflector.go => /home/niko/go/src/github.com/lbryio/reflector.go/
require ( require (
github.com/Microsoft/go-winio v0.4.14 // indirect
github.com/abadojack/whatlanggo v1.0.1 github.com/abadojack/whatlanggo v1.0.1
github.com/asaskevich/govalidator v0.0.0-20210307081110-f21760c49a8d github.com/asaskevich/govalidator v0.0.0-20200819183940-29e1ff8eb0bb
github.com/aws/aws-sdk-go v1.44.6 github.com/aws/aws-sdk-go v1.25.9
github.com/davecgh/go-spew v1.1.1 github.com/davecgh/go-spew v1.1.1
github.com/docker/docker v20.10.17+incompatible github.com/docker/distribution v2.7.1+incompatible // indirect
github.com/lbryio/lbry.go/v2 v2.7.2-0.20220815204100-2adb8af5b68c github.com/docker/docker v1.13.1
github.com/lbryio/reflector.go v1.1.3-0.20220730181028-f5d30b1a6e79 github.com/docker/go-connections v0.4.0 // indirect
github.com/mitchellh/go-ps v1.0.0 github.com/docker/go-units v0.4.0 // indirect
github.com/prometheus/client_golang v1.12.1 github.com/hashicorp/go-immutable-radix v1.1.0 // indirect
github.com/shopspring/decimal v1.3.1 github.com/hashicorp/go-sockaddr v1.0.2 // indirect
github.com/sirupsen/logrus v1.9.0 github.com/hashicorp/memberlist v0.1.5 // indirect
github.com/spf13/cobra v1.4.0 github.com/hashicorp/serf v0.8.5 // indirect
github.com/stretchr/testify v1.7.1 github.com/kr/pretty v0.2.1 // indirect
github.com/tkanos/gonfig v0.0.0-20210106201359-53e13348de2f github.com/lbryio/lbry.go/v2 v2.7.2-0.20210824154606-3e18b74da08b
github.com/vbauerster/mpb/v7 v7.4.1 github.com/lbryio/reflector.go v1.1.3-0.20210412225256-4392c9724262
gopkg.in/vansante/go-ffprobe.v2 v2.0.3 github.com/miekg/dns v1.1.22 // indirect
github.com/mitchellh/go-ps v0.0.0-20190716172923-621e5597135b
github.com/opencontainers/go-digest v1.0.0-rc1 // indirect
github.com/phayes/freeport v0.0.0-20180830031419-95f893ade6f2 // indirect
github.com/prometheus/client_golang v0.9.3
github.com/shopspring/decimal v0.0.0-20191009025716-f1972eb1d1f5
github.com/sirupsen/logrus v1.4.2
github.com/spf13/cobra v0.0.5
github.com/spf13/pflag v1.0.5 // indirect
github.com/stretchr/testify v1.7.0
github.com/vbauerster/mpb/v7 v7.0.2
google.golang.org/appengine v1.6.5 // indirect
gopkg.in/ini.v1 v1.60.2 // indirect
gopkg.in/vansante/go-ffprobe.v2 v2.0.2
gotest.tools v2.2.0+incompatible gotest.tools v2.2.0+incompatible
) )
require ( go 1.13
github.com/Microsoft/go-winio v0.5.1 // indirect
github.com/VividCortex/ewma v1.2.0 // indirect
github.com/acarl005/stripansi v0.0.0-20180116102854-5a71ef0e047d // indirect
github.com/armon/go-metrics v0.0.0-20190430140413-ec5e00d3c878 // indirect
github.com/beorn7/perks v1.0.1 // indirect
github.com/bluele/gcache v0.0.2 // indirect
github.com/brk0v/directio v0.0.0-20190225130936-69406e757cf7 // indirect
github.com/btcsuite/btcd v0.0.0-20190824003749-130ea5bddde3 // indirect
github.com/btcsuite/btclog v0.0.0-20170628155309-84c8d2346e9f // indirect
github.com/btcsuite/btcutil v0.0.0-20190425235716-9e5f4b9a998d // indirect
github.com/btcsuite/go-socks v0.0.0-20170105172521-4720035b7bfd // indirect
github.com/btcsuite/websocket v0.0.0-20150119174127-31079b680792 // indirect
github.com/c2h5oh/datasize v0.0.0-20200825124411-48ed595a09d2 // indirect
github.com/cespare/xxhash/v2 v2.1.2 // indirect
github.com/cheekybits/genny v1.0.0 // indirect
github.com/docker/distribution v2.8.1+incompatible // indirect
github.com/docker/go-connections v0.4.0 // indirect
github.com/docker/go-units v0.4.0 // indirect
github.com/ekyoung/gin-nice-recovery v0.0.0-20160510022553-1654dca486db // indirect
github.com/fatih/structs v1.1.0 // indirect
github.com/fsnotify/fsnotify v1.4.9 // indirect
github.com/ghodss/yaml v1.0.0 // indirect
github.com/gin-contrib/sse v0.1.0 // indirect
github.com/gin-gonic/gin v1.7.7 // indirect
github.com/go-errors/errors v1.1.1 // indirect
github.com/go-ini/ini v1.48.0 // indirect
github.com/go-playground/locales v0.13.0 // indirect
github.com/go-playground/universal-translator v0.17.0 // indirect
github.com/go-playground/validator/v10 v10.4.1 // indirect
github.com/go-sql-driver/mysql v1.6.0 // indirect
github.com/go-task/slim-sprig v0.0.0-20210107165309-348f09dbbbc0 // indirect
github.com/gofrs/uuid v3.2.0+incompatible // indirect
github.com/gogo/protobuf v1.3.2 // indirect
github.com/golang/protobuf v1.5.2 // indirect
github.com/google/btree v1.0.1 // indirect
github.com/google/go-cmp v0.5.7 // indirect
github.com/gorilla/mux v1.8.0 // indirect
github.com/gorilla/rpc v1.2.0 // indirect
github.com/gorilla/websocket v1.4.2 // indirect
github.com/hashicorp/errwrap v1.1.0 // indirect
github.com/hashicorp/go-immutable-radix v1.1.0 // indirect
github.com/hashicorp/go-msgpack v0.5.5 // indirect
github.com/hashicorp/go-multierror v1.1.1 // indirect
github.com/hashicorp/go-sockaddr v1.0.2 // indirect
github.com/hashicorp/golang-lru v0.5.4 // indirect
github.com/hashicorp/hcl v1.0.0 // indirect
github.com/hashicorp/memberlist v0.3.0 // indirect
github.com/hashicorp/serf v0.9.7 // indirect
github.com/inconshreveable/go-update v0.0.0-20160112193335-8152e7eb6ccf // indirect
github.com/inconshreveable/mousetrap v1.0.0 // indirect
github.com/jmespath/go-jmespath v0.4.0 // indirect
github.com/johntdyer/slack-go v0.0.0-20180213144715-95fac1160b22 // indirect
github.com/johntdyer/slackrus v0.0.0-20211215141436-33e4a270affb // indirect
github.com/json-iterator/go v1.1.12 // indirect
github.com/karrick/godirwalk v1.17.0 // indirect
github.com/kr/text v0.2.0 // indirect
github.com/lbryio/chainquery v1.9.0 // indirect
github.com/lbryio/lbry.go v1.1.2 // indirect
github.com/lbryio/types v0.0.0-20220224142228-73610f6654a6 // indirect
github.com/leodido/go-urn v1.2.0 // indirect
github.com/lucas-clemente/quic-go v0.28.1 // indirect
github.com/lyoshenka/bencode v0.0.0-20180323155644-b7abd7672df5 // indirect
github.com/magiconair/properties v1.8.1 // indirect
github.com/marten-seemann/qpack v0.2.1 // indirect
github.com/marten-seemann/qtls-go1-16 v0.1.5 // indirect
github.com/marten-seemann/qtls-go1-17 v0.1.2 // indirect
github.com/marten-seemann/qtls-go1-18 v0.1.2 // indirect
github.com/marten-seemann/qtls-go1-19 v0.1.0-beta.1 // indirect
github.com/mattn/go-isatty v0.0.12 // indirect
github.com/mattn/go-runewidth v0.0.13 // indirect
github.com/matttproud/golang_protobuf_extensions v1.0.2-0.20181231171920-c182affec369 // indirect
github.com/miekg/dns v1.1.41 // indirect
github.com/mitchellh/mapstructure v1.5.0 // indirect
github.com/moby/term v0.0.0-20210619224110-3f7ff695adc6 // indirect
github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd // indirect
github.com/modern-go/reflect2 v1.0.2 // indirect
github.com/morikuni/aec v1.0.0 // indirect
github.com/nxadm/tail v1.4.8 // indirect
github.com/onsi/ginkgo v1.16.4 // indirect
github.com/onsi/gomega v1.17.0 // indirect
github.com/opencontainers/go-digest v1.0.0 // indirect
github.com/opencontainers/image-spec v1.0.3-0.20211202183452-c5a74bcca799 // indirect
github.com/pelletier/go-toml v1.9.3 // indirect
github.com/pkg/errors v0.9.1 // indirect
github.com/pmezard/go-difflib v1.0.0 // indirect
github.com/prometheus/client_model v0.2.0 // indirect
github.com/prometheus/common v0.32.1 // indirect
github.com/prometheus/procfs v0.7.3 // indirect
github.com/rivo/uniseg v0.2.0 // indirect
github.com/sean-/seed v0.0.0-20170313163322-e2103e2c3529 // indirect
github.com/slack-go/slack v0.10.3 // indirect
github.com/spf13/afero v1.4.1 // indirect
github.com/spf13/cast v1.4.1 // indirect
github.com/spf13/jwalterweatherman v1.0.0 // indirect
github.com/spf13/pflag v1.0.5 // indirect
github.com/spf13/viper v1.7.1 // indirect
github.com/subosito/gotenv v1.2.0 // indirect
github.com/ugorji/go/codec v1.1.7 // indirect
github.com/volatiletech/inflect v0.0.0-20170731032912-e7201282ae8d // indirect
github.com/volatiletech/null v8.0.0+incompatible // indirect
github.com/volatiletech/sqlboiler v3.4.0+incompatible // indirect
github.com/ybbus/jsonrpc v2.1.2+incompatible // indirect
go.uber.org/atomic v1.9.0 // indirect
golang.org/x/crypto v0.0.0-20210817164053-32db794688a5 // indirect
golang.org/x/mod v0.4.2 // indirect
golang.org/x/net v0.0.0-20220624214902-1bab6f366d9e // indirect
golang.org/x/sync v0.0.0-20210220032951-036812b2e83c // indirect
golang.org/x/sys v0.0.0-20220715151400-c0bba94af5f8 // indirect
golang.org/x/text v0.3.7 // indirect
golang.org/x/time v0.0.0-20210723032227-1f47c861a9ac // indirect
golang.org/x/tools v0.1.5 // indirect
golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1 // indirect
google.golang.org/protobuf v1.27.1 // indirect
gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c // indirect
gopkg.in/ini.v1 v1.60.2 // indirect
gopkg.in/nullbio/null.v6 v6.0.0-20161116030900-40264a2e6b79 // indirect
gopkg.in/tomb.v1 v1.0.0-20141024135613-dd632973f1e7 // indirect
gopkg.in/yaml.v2 v2.4.0 // indirect
gopkg.in/yaml.v3 v3.0.0-20210107192922-496545a6307b // indirect
gotest.tools/v3 v3.2.0 // indirect
)

go.sum (605 changes): file diff suppressed because it is too large

local/local.go (new file, 253 changes)

@ -0,0 +1,253 @@
package local
import (
"errors"
"os"
"regexp"
"strings"
"time"
log "github.com/sirupsen/logrus"
"github.com/spf13/cobra"
"github.com/abadojack/whatlanggo"
"github.com/lbryio/lbry.go/v2/extras/util"
"github.com/lbryio/ytsync/v5/namer"
"github.com/lbryio/ytsync/v5/tags_manager"
)
type SyncContext struct {
DryRun bool
KeepCache bool
ReflectStreams bool
TempDir string
LbrynetAddr string
ChannelID string
PublishBid float64
YouTubeSourceConfig *YouTubeSourceConfig
}
func (c *SyncContext) Validate() error {
if c.TempDir == "" {
return errors.New("No TempDir provided")
}
if c.LbrynetAddr == "" {
return errors.New("No Lbrynet address provided")
}
if c.ChannelID == "" {
return errors.New("No channel ID provided")
}
if c.PublishBid <= 0.0 {
return errors.New("Publish bid is not greater than zero")
}
return nil
}
type YouTubeSourceConfig struct {
YouTubeAPIKey string
}
var syncContext SyncContext
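// AddCommand registers the `local` subcommand and its flags on the given root command.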
func AddCommand(rootCmd *cobra.Command) {
cmd := &cobra.Command{
Use: "local",
Short: "run a personal ytsync",
Run: localCmd,
Args: cobra.ExactArgs(1),
}
cmd.Flags().BoolVar(&syncContext.DryRun, "dry-run", false, "Display information about the stream publishing, but do not publish the stream")
cmd.Flags().BoolVar(&syncContext.KeepCache, "keep-cache", false, "Don't delete local files after publishing.")
cmd.Flags().BoolVar(&syncContext.ReflectStreams, "reflect-streams", true, "Require published streams to be reflected.")
cmd.Flags().StringVar(&syncContext.TempDir, "temp-dir", getEnvDefault("TEMP_DIR", ""), "directory to use for temporary files")
cmd.Flags().Float64Var(&syncContext.PublishBid, "publish-bid", 0.01, "Bid amount for the stream claim")
cmd.Flags().StringVar(&syncContext.LbrynetAddr, "lbrynet-address", getEnvDefault("LBRYNET_ADDRESS", ""), "JSONRPC address of the local LBRYNet daemon")
cmd.Flags().StringVar(&syncContext.ChannelID, "channel-id", "", "LBRY channel ID to publish to")
// For now, assume source is always YouTube
syncContext.YouTubeSourceConfig = &YouTubeSourceConfig{}
cmd.Flags().StringVar(&syncContext.YouTubeSourceConfig.YouTubeAPIKey, "youtube-api-key", getEnvDefault("YOUTUBE_API_KEY", ""), "YouTube API Key")
rootCmd.AddCommand(cmd)
}
func getEnvDefault(key, defaultValue string) string {
if value, ok := os.LookupEnv(key); ok {
return value
}
return defaultValue
}
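// localCmd syncs a single video end to end: it sets up the publisher and the video
// source, fetches and prepares the video, publishes it (unless --dry-run is set),
// and optionally cleans up the local cache afterwards.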
func localCmd(cmd *cobra.Command, args []string) {
err := syncContext.Validate()
if err != nil {
log.Error(err)
return
}
videoID := args[0]
log.Debugf("Running sync for video ID %s", videoID)
var publisher VideoPublisher
publisher, err = NewLocalSDKPublisher(syncContext.LbrynetAddr, syncContext.ChannelID, syncContext.PublishBid)
if err != nil {
log.Errorf("Error setting up publisher: %v", err)
return
}
var videoSource VideoSource
if syncContext.YouTubeSourceConfig != nil {
videoSource, err = NewYtdlVideoSource(syncContext.TempDir, syncContext.YouTubeSourceConfig)
if err != nil {
log.Errorf("Error setting up video source: %v", err)
return
}
}
sourceVideo, err := videoSource.GetVideo(videoID)
if err != nil {
log.Errorf("Error getting source video: %v", err)
return
}
processedVideo, err := processVideoForPublishing(*sourceVideo, syncContext.ChannelID)
if err != nil {
log.Errorf("Error processing source video for publishing: %v", err)
return
}
if syncContext.DryRun {
log.Infoln("This is a dry run. Nothing will be published.")
log.Infof("The local file %s would be published to channel ID %s as %s.", processedVideo.FullLocalPath, syncContext.ChannelID, processedVideo.ClaimName)
log.Debugf("Object to be published: %v", processedVideo)
} else {
doneReflectingCh, err := publisher.Publish(*processedVideo, syncContext.ReflectStreams)
if err != nil {
log.Errorf("Error publishing video: %v", err)
return
}
if syncContext.ReflectStreams {
err = <-doneReflectingCh
if err != nil {
log.Errorf("Error while wating for stream to reflect: %v", err)
}
} else {
log.Debugln("Not waiting for stream to reflect.")
}
}
if !syncContext.KeepCache {
log.Infof("Deleting local files.")
err = videoSource.DeleteLocalCache(videoID)
if err != nil {
log.Errorf("Error deleting local files for video %s: %v", videoID, err)
}
}
log.Info("Done")
}
type SourceVideo struct {
ID string
Title *string
Description *string
SourceURL string
Languages []string
Tags []string
ReleaseTime *int64
ThumbnailURL *string
FullLocalPath string
}
type PublishableVideo struct {
ID string
ClaimName string
Title string
Description string
SourceURL string
Languages []string
Tags []string
ReleaseTime int64
ThumbnailURL string
FullLocalPath string
}
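// processVideoForPublishing turns a SourceVideo into a PublishableVideo: it sanitizes
// the tags, detects a language from the description and title, picks a claim name,
// and falls back to defaults for the thumbnail URL and release time.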
func processVideoForPublishing(source SourceVideo, channelID string) (*PublishableVideo, error) {
tags, err := tags_manager.SanitizeTags(source.Tags, channelID)
if err != nil {
log.Errorf("Error sanitizing tags: %v", err)
return nil, err
}
descriptionSample := ""
if source.Description != nil {
urlsRegex := regexp.MustCompile(`(?m) ?(f|ht)(tp)(s?)(://)(.*)[.|/](.*)`)
descriptionSample = urlsRegex.ReplaceAllString(*source.Description, "")
}
info := whatlanggo.Detect(descriptionSample)
title := ""
if source.Title != nil {
title = *source.Title
}
info2 := whatlanggo.Detect(title)
var languages []string = nil
if info.IsReliable() && info.Lang.Iso6391() != "" {
language := info.Lang.Iso6391()
languages = []string{language}
} else if info2.IsReliable() && info2.Lang.Iso6391() != "" {
language := info2.Lang.Iso6391()
languages = []string{language}
}
claimName := namer.NewNamer().GetNextName(title)
thumbnailURL := source.ThumbnailURL
if thumbnailURL == nil {
thumbnailURL = util.PtrToString("")
}
releaseTime := source.ReleaseTime
if releaseTime == nil {
releaseTime = util.PtrToInt64(time.Now().Unix())
}
processed := PublishableVideo {
ClaimName: claimName,
Title: title,
Description: getAbbrevDescription(source),
Languages: languages,
Tags: tags,
ReleaseTime: *releaseTime,
ThumbnailURL: *thumbnailURL,
FullLocalPath: source.FullLocalPath,
}
log.Debugf("Video prepared for publication: %v", processed)
return &processed, nil
}
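// getAbbrevDescription trims the description so that, together with the appended
// source URL, it stays within the 2800-character budget used here.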
func getAbbrevDescription(v SourceVideo) string {
if v.Description == nil {
return v.SourceURL
}
additionalDescription := "\n...\n" + v.SourceURL
maxLength := 2800 - len(additionalDescription)
description := strings.TrimSpace(*v.Description)
if len(description) > maxLength {
description = description[:maxLength]
}
return description + additionalDescription
}
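// VideoSource and VideoPublisher are the extension points of the local sync: a source
// knows how to fetch a video and clean up its local cache, and a publisher knows how
// to put a prepared video on the network.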
type VideoSource interface {
GetVideo(id string) (*SourceVideo, error)
DeleteLocalCache(id string) error
}
type VideoPublisher interface {
Publish(video PublishableVideo, reflectStream bool) (chan error, error)
}

local/localSDKPublisher.go (new file, 125 changes)

@ -0,0 +1,125 @@
package local
import (
"errors"
"sort"
"time"
log "github.com/sirupsen/logrus"
"github.com/lbryio/lbry.go/v2/extras/jsonrpc"
"github.com/lbryio/lbry.go/v2/extras/util"
)
type LocalSDKPublisher struct {
channelID string
publishBid float64
lbrynet *jsonrpc.Client
}
func NewLocalSDKPublisher(sdkAddr, channelID string, publishBid float64) (*LocalSDKPublisher, error) {
lbrynet := jsonrpc.NewClient(sdkAddr)
lbrynet.SetRPCTimeout(5 * time.Minute)
status, err := lbrynet.Status()
if err != nil {
return nil, err
}
if !status.IsRunning {
return nil, errors.New("SDK is not running")
}
// Should check to see if the SDK owns the channel
// Should check to see if wallet is unlocked
// but jsonrpc.Client doesn't have WalletStatus method
// so skip for now
// Should check to see if streams are configured to be reflected and warn if not
// but jsonrpc.Client doesn't have SettingsGet method to see if streams are reflected
// so use File.UploadingToReflector as a proxy for now
publisher := LocalSDKPublisher {
channelID: channelID,
publishBid: publishBid,
lbrynet: lbrynet,
}
return &publisher, nil
}
func (p *LocalSDKPublisher) Publish(video PublishableVideo, reflectStream bool) (chan error, error) {
streamCreateOptions := jsonrpc.StreamCreateOptions {
ClaimCreateOptions: jsonrpc.ClaimCreateOptions {
Title: &video.Title,
Description: &video.Description,
Languages: video.Languages,
ThumbnailURL: &video.ThumbnailURL,
Tags: video.Tags,
},
ReleaseTime: &video.ReleaseTime,
ChannelID: &p.channelID,
License: util.PtrToString("Copyrighted (contact publisher)"),
}
txSummary, err := p.lbrynet.StreamCreate(video.ClaimName, video.FullLocalPath, p.publishBid, streamCreateOptions)
if err != nil {
return nil, err
}
if !reflectStream {
return nil, nil
}
done := make(chan error, 1)
go func() {
for {
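// Poll the SDK's file list until the stream reports as fully reflected, bailing
// out if the stream is not being uploaded to a reflector at all.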
fileListResponse, fileIndex, err := findFileByTxid(p.lbrynet, txSummary.Txid)
if err != nil {
log.Errorf("Error finding file by txid: %v", err)
done <- err
return
}
if fileListResponse == nil {
log.Errorf("Could not find file in list with correct txid")
done <- errors.New("could not find file in list with correct txid")
return
}
fileStatus := fileListResponse.Items[fileIndex]
if fileStatus.IsFullyReflected {
log.Info("Stream is fully reflected")
break
}
if !fileStatus.UploadingToReflector {
log.Error("Stream is not being uploaded to a reflector. Check your lbrynet settings if this is a mistake.")
done <- errors.New("Stream is not being reflected (check lbrynet settings).")
return
}
log.Infof("Stream reflector progress: %d%%", fileStatus.ReflectorProgress)
time.Sleep(5 * time.Second)
}
done <- nil
}()
return done, nil
}
// if jsonrpc.Client.FileList is extended to match the actual jsonrpc schema, this can be removed
func findFileByTxid(client *jsonrpc.Client, txid string) (*jsonrpc.FileListResponse, int, error) {
response, err := client.FileList(0, 20)
for {
if err != nil {
log.Errorf("Error getting file list page: %v", err)
return nil, 0, err
}
for i, item := range response.Items {
if item.Txid == txid {
return response, i, nil
}
}
if response.Page >= response.TotalPages {
return nil, 0, nil
}
response, err = client.FileList(response.Page + 1, 20)
}
}

69
local/readme.md Normal file

@ -0,0 +1,69 @@
# Running ytsync locally
## Requirements
- LBRY SDK (what do we actually need this for?)
- youtube-dl
- enough space to cache stuff
- YouTube data API key
## Process
### Ensuring requirements are met
- claim channel if there isn't one yet
- or easier, just error if no channel
- enough lbc in wallet?
### Getting a YouTube API key
To access the YouTube Data API, you will first need a Google account.
The API supports two methods of authentication, OAuth2 and API keys. This application uses API keys.
API keys are effectively passwords: once obtained, they should not be shared.
The instructions for obtaining an API key are copied below from [here](https://developers.google.com/youtube/registering_an_application):
1. Open the [Credentials page](https://console.developers.google.com/apis/credentials) in the API Console.
2. Create an API key in the Console by clicking **Create credentials > API key**. You can restrict the key before using it in production by clicking **Restrict key** and selecting one of the **Restrictions**.
To keep your API keys secure, follow the [best practices for securely using API keys](https://cloud.google.com/docs/authentication/api-keys).
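Once a key exists, a quick sanity check is to hit the same endpoint this tool uses (videos.list with part=snippet) and look for a 200 response. The following is only a sketch, not part of ytsync; YOUR_API_KEY is a placeholder and dQw4w9WgXcQ is just an arbitrary public video ID.

package main

import (
	"fmt"
	"log"
	"net/http"
)

func main() {
	apiKey := "YOUR_API_KEY" // placeholder for the key created above
	resp, err := http.Get("https://youtube.googleapis.com/youtube/v3/videos" +
		"?part=snippet&id=dQw4w9WgXcQ&key=" + apiKey)
	if err != nil {
		log.Fatal(err)
	}
	defer resp.Body.Close()
	// 200 OK means the key works; 400/403 usually indicates a bad or restricted key.
	fmt.Println(resp.Status)
}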
### Options to figure out what's already synced
- simplest: assume nothing is synced yet
- assume everything before some video is synced
- get/put sync info from Odysee by proving you have private key for channel
- tag videos as having been synced from youtube so we can ensure accuracy
- hardest: scan channel and try to match up which videos are not synced yet
### Central DB
- prove you have a channel's private key to get info about that channel
- proper queue instead of sleeping for N minutes between syncs
### Syncing a single video
- downloading it
- thumbnails
- metadata
- having enough LBC for publish(es)
- automated error handling
- getting a human involved for errors that can't be handled automatically
- reflecting
### Continuous Sync
- running in background
- storing local state
- interactions with our central ytsync db
- dealing with yt throttling
### Debugging
- dry-running the whole thing

45
local/youtubeEnricher.go Normal file

@ -0,0 +1,45 @@
package local
import (
"time"
log "github.com/sirupsen/logrus"
"github.com/lbryio/lbry.go/v2/extras/util"
)
type YouTubeVideoEnricher interface {
EnrichMissing(source *SourceVideo) error
}
type YouTubeAPIVideoEnricher struct {
api *YouTubeAPI
}
func NewYouTubeAPIVideoEnricher(apiKey string) (*YouTubeAPIVideoEnricher) {
enricher := YouTubeAPIVideoEnricher{
api: NewYouTubeAPI(apiKey),
}
return &enricher
}
func (e *YouTubeAPIVideoEnricher) EnrichMissing(source *SourceVideo) error {
if source.ReleaseTime != nil {
log.Debugf("Video %s does not need enrichment. YouTubeAPIVideoEnricher is skipping.", source.ID)
return nil
}
snippet, err := e.api.GetVideoSnippet(source.ID)
if err != nil {
log.Errorf("Error snippet data for video %s: %v", err)
return err
}
publishedAt, err := time.Parse(time.RFC3339, snippet.PublishedAt)
if err != nil {
log.Errorf("Error converting publishedAt to timestamp: %v", err)
} else {
source.ReleaseTime = util.PtrToInt64(publishedAt.Unix())
}
return nil
}

83
local/ytapi.go Normal file

@ -0,0 +1,83 @@
package local
import (
"encoding/json"
"fmt"
"io"
"net/http"
"time"
log "github.com/sirupsen/logrus"
)
type YouTubeAPI struct {
apiKey string
client *http.Client
}
func NewYouTubeAPI(apiKey string) (*YouTubeAPI) {
client := &http.Client {
Transport: &http.Transport{
MaxIdleConns: 10,
IdleConnTimeout: 30 * time.Second,
DisableCompression: true,
},
}
api := YouTubeAPI {
apiKey: apiKey,
client: client,
}
return &api
}
func (a *YouTubeAPI) GetVideoSnippet(videoID string) (*VideoSnippet, error) {
req, err := http.NewRequest("GET", "https://youtube.googleapis.com/youtube/v3/videos", nil)
if err != nil {
log.Errorf("Error creating http client for YouTube API: %v", err)
return nil, err
}
query := req.URL.Query()
query.Add("part", "snippet")
query.Add("id", videoID)
query.Add("key", a.apiKey)
req.URL.RawQuery = query.Encode()
req.Header.Add("Accept", "application/json")
resp, err := a.client.Do(req)
if err != nil {
log.Errorf("Error from YouTube API: %v", err)
return nil, err
}
defer resp.Body.Close()
body, err := io.ReadAll(resp.Body)
if err != nil {
log.Errorf("Error reading response from YouTube API: %v", err)
return nil, err
}
log.Tracef("Response from YouTube API: %s", string(body))
var result videoListResponse
err = json.Unmarshal(body, &result)
if err != nil {
log.Errorf("Error deserializing video list response from YouTube API: %v", err)
return nil, err
}
if len(result.Items) != 1 {
err = fmt.Errorf("YouTube API responded with incorrect number of snippets (%d) while attempting to get snippet data for video %s", len(result.Items), videoID)
return nil, err
}
return &result.Items[0].Snippet, nil
}
type videoListResponse struct {
Items []struct {
Snippet VideoSnippet `json:"snippet"`
} `json:"items"`
}
type VideoSnippet struct {
PublishedAt string `json:"publishedAt"`
}

239
local/ytdl.go Normal file

@ -0,0 +1,239 @@
package local
import (
"encoding/json"
"errors"
"fmt"
"io/ioutil"
"os"
"os/exec"
"path"
"strings"
log "github.com/sirupsen/logrus"
"github.com/lbryio/ytsync/v5/downloader/ytdl"
)
type Ytdl struct {
DownloadDir string
}
func NewYtdl(downloadDir string) (*Ytdl, error) {
// TODO validate download dir
y := Ytdl {
DownloadDir: downloadDir,
}
return &y, nil
}
func (y *Ytdl) GetVideoMetadata(videoID string) (*ytdl.YtdlVideo, error) {
metadataPath, err := y.GetVideoMetadataFile(videoID)
if err != nil {
return nil, err
}
metadataBytes, err := os.ReadFile(metadataPath)
if err != nil {
return nil, err
}
var metadata *ytdl.YtdlVideo
err = json.Unmarshal(metadataBytes, &metadata)
if err != nil {
return nil, err
}
return metadata, nil
}
func (y *Ytdl) GetVideoMetadataFile(videoID string) (string, error) {
basePath := path.Join(y.DownloadDir, videoID)
metadataPath := basePath + ".info.json"
_, err := os.Stat(metadataPath)
if err != nil && !os.IsNotExist(err) {
log.Errorf("Error determining if video metadata already exists: %v", err)
return "", err
} else if err != nil {
log.Debugf("Metadata file for video %s does not exist. Downloading now.", videoID)
err = downloadVideoMetadata(basePath, videoID)
if err != nil {
return "", err
}
}
return metadataPath, nil
}
func (y *Ytdl) GetVideoFile(videoID string) (string, error) {
videoPath, err := findDownloadedVideo(y.DownloadDir, videoID)
if err != nil {
return "", err
}
if videoPath != nil {
return *videoPath, nil
}
basePath := path.Join(y.DownloadDir, videoID)
metadataPath, err := y.GetVideoMetadataFile(videoID)
if err != nil {
log.Errorf("Error getting metadata path in preparation for video download: %v", err)
return "", err
}
err = downloadVideo(basePath, metadataPath)
if err != nil {
return "", nil
}
videoPath, err = findDownloadedVideo(y.DownloadDir, videoID)
if err != nil {
log.Errorf("Error from findDownloadedVideo() after already succeeding once: %v", err)
return "", err
}
if videoPath == nil {
return "", errors.New("Could not find a downloaded video after successful download.")
}
return *videoPath, nil
}
func (y *Ytdl) DeleteVideoFiles(videoID string) error {
files, err := ioutil.ReadDir(y.DownloadDir)
if err != nil {
return err
}
for _, f := range files {
if f.IsDir() {
continue
}
if strings.Contains(f.Name(), videoID) {
videoPath := path.Join(y.DownloadDir, f.Name())
err = os.Remove(videoPath)
if err != nil {
log.Errorf("Error while deleting file %s: %v", y.DownloadDir, err)
return err
}
}
}
return nil
}
func deleteFile(path string) error {
_, err := os.Stat(path)
if err != nil && !os.IsNotExist(err) {
log.Errorf("Error determining if file %s exists: %v", path, err)
return err
} else if err != nil {
log.Debugf("File %s does not exist. Skipping deletion.", path)
return nil
}
return os.Remove(path)
}
func findDownloadedVideo(videoDir, videoID string) (*string, error) {
files, err := ioutil.ReadDir(videoDir)
if err != nil {
return nil, err
}
for _, f := range files {
if f.IsDir() {
continue
}
if path.Ext(f.Name()) == ".mp4" && strings.Contains(f.Name(), videoID) {
videoPath := path.Join(videoDir, f.Name())
return &videoPath, nil
}
}
return nil, nil
}
func downloadVideoMetadata(basePath, videoID string) error {
ytdlArgs := []string{
"--skip-download",
"--write-info-json",
"--force-overwrites",
fmt.Sprintf("https://www.youtube.com/watch?v=%s", videoID),
"--cookies",
"cookies.txt",
"-o",
basePath,
}
ytdlCmd := exec.Command("yt-dlp", ytdlArgs...)
output, err := runCmd(ytdlCmd)
log.Debug(output)
return err
}
func downloadVideo(basePath, metadataPath string) error {
ytdlArgs := []string{
"--no-progress",
"-o",
basePath,
"--merge-output-format",
"mp4",
"--postprocessor-args",
"ffmpeg:-movflags faststart",
"--abort-on-unavailable-fragment",
"--fragment-retries",
"1",
"--cookies",
"cookies.txt",
"--extractor-args",
"youtube:player_client=android",
"--load-info-json",
metadataPath,
"-fbestvideo[ext=mp4][vcodec!*=av01][height<=720]+bestaudio[ext!=webm][format_id!=258][format_id!=251][format_id!=256][format_id!=327]",
}
ytdlCmd := exec.Command("yt-dlp", ytdlArgs...)
output, err := runCmd(ytdlCmd)
log.Debug(output)
return err
}
func runCmd(cmd *exec.Cmd) ([]string, error) {
log.Infof("running cmd: %s", strings.Join(cmd.Args, " "))
var err error
stderr, err := cmd.StderrPipe()
if err != nil {
return nil, err
}
stdout, err := cmd.StdoutPipe()
if err != nil {
return nil, err
}
err = cmd.Start()
if err != nil {
return nil, err
}
outLog, err := ioutil.ReadAll(stdout)
if err != nil {
return nil, err
}
errorLog, err := ioutil.ReadAll(stderr)
if err != nil {
return nil, err
}
done := make(chan error, 1)
go func() {
done <- cmd.Wait()
}()
select {
case err := <-done:
if err != nil {
log.Error(string(errorLog))
return nil, err
}
return strings.Split(strings.Replace(string(outLog), "\r\n", "\n", -1), "\n"), nil
}
}

76
local/ytdlVideoSource.go Normal file

@ -0,0 +1,76 @@
package local
import (
log "github.com/sirupsen/logrus"
"github.com/lbryio/ytsync/v5/downloader/ytdl"
)
type YtdlVideoSource struct {
downloader Ytdl
enrichers []YouTubeVideoEnricher
}
func NewYtdlVideoSource(downloadDir string, config *YouTubeSourceConfig) (*YtdlVideoSource, error) {
ytdl, err := NewYtdl(downloadDir)
if err != nil {
return nil, err
}
source := YtdlVideoSource {
downloader: *ytdl,
}
if config.YouTubeAPIKey != "" {
ytapiEnricher := NewYouTubeAPIVideoEnricher(config.YouTubeAPIKey)
source.enrichers = append(source.enrichers, ytapiEnricher)
}
return &source, nil
}
func (s *YtdlVideoSource) GetVideo(id string) (*SourceVideo, error) {
metadata, err := s.downloader.GetVideoMetadata(id)
if err != nil {
return nil, err
}
videoPath, err := s.downloader.GetVideoFile(id)
if err != nil {
return nil, err
}
var bestThumbnail *ytdl.Thumbnail = nil
for i := range metadata.Thumbnails {
// index into the slice so we don't keep a pointer to the loop variable
if bestThumbnail == nil || bestThumbnail.Width < metadata.Thumbnails[i].Width {
bestThumbnail = &metadata.Thumbnails[i]
}
}
var thumbnailURL *string = nil
if bestThumbnail != nil {
thumbnailURL = &bestThumbnail.URL
}
sourceVideo := SourceVideo {
ID: id,
Title: &metadata.Title,
Description: &metadata.Description,
SourceURL: "\nhttps://www.youtube.com/watch?v=" + id,
Languages: []string{},
Tags: metadata.Tags,
ReleaseTime: nil,
ThumbnailURL: thumbnailURL,
FullLocalPath: videoPath,
}
for _, enricher := range s.enrichers {
err = enricher.EnrichMissing(&sourceVideo)
if err != nil {
log.Warnf("Error enriching video %s, continuing enrichment: %v", id, err)
}
}
log.Debugf("Source video retrieved via ytdl: %v", sourceVideo)
return &sourceVideo, nil
}
func (s *YtdlVideoSource) DeleteLocalCache(id string) error {
return s.downloader.DeleteVideoFiles(id)
}

119
main.go

@ -1,135 +1,24 @@
package main package main
import ( import (
"fmt"
"math/rand" "math/rand"
"net/http" "net/http"
"os"
"time" "time"
"github.com/lbryio/ytsync/v5/configs" "github.com/lbryio/ytsync/v5/cmd"
"github.com/lbryio/ytsync/v5/manager"
"github.com/lbryio/ytsync/v5/shared"
ytUtils "github.com/lbryio/ytsync/v5/util"
"github.com/lbryio/lbry.go/v2/extras/errors"
"github.com/lbryio/lbry.go/v2/extras/util"
"github.com/prometheus/client_golang/prometheus/promhttp" "github.com/prometheus/client_golang/prometheus/promhttp"
log "github.com/sirupsen/logrus" log "github.com/sirupsen/logrus"
"github.com/spf13/cobra"
)
var Version string
const defaultMaxTries = 3
var (
cliFlags shared.SyncFlags
maxVideoLength int
) )
func main() { func main() {
rand.Seed(time.Now().UnixNano()) rand.Seed(time.Now().UnixNano())
log.SetLevel(log.DebugLevel) log.SetLevel(log.DebugLevel)
customFormatter := new(log.TextFormatter)
customFormatter.TimestampFormat = "2006-01-02 15:04:05"
customFormatter.FullTimestamp = true
log.SetFormatter(customFormatter)
http.Handle("/metrics", promhttp.Handler())
go func() { go func() {
http.Handle("/metrics", promhttp.Handler())
log.Error(http.ListenAndServe(":2112", nil)) log.Error(http.ListenAndServe(":2112", nil))
}() }()
cmd := &cobra.Command{
Use: "ytsync",
Short: "Publish youtube channels into LBRY network automatically.",
Run: ytSync,
Args: cobra.RangeArgs(0, 0),
}
cmd.Flags().IntVar(&cliFlags.MaxTries, "max-tries", defaultMaxTries, "Number of times to try a publish that fails") cmd.Execute()
cmd.Flags().BoolVar(&cliFlags.TakeOverExistingChannel, "takeover-existing-channel", false, "If channel exists and we don't own it, take over the channel")
cmd.Flags().IntVar(&cliFlags.Limit, "limit", 0, "limit the amount of channels to sync")
cmd.Flags().BoolVar(&cliFlags.SkipSpaceCheck, "skip-space-check", false, "Do not perform free space check on startup")
cmd.Flags().BoolVar(&cliFlags.SyncUpdate, "update", false, "Update previously synced channels instead of syncing new ones")
cmd.Flags().BoolVar(&cliFlags.SingleRun, "run-once", false, "Whether the process should be stopped after one cycle or not")
cmd.Flags().BoolVar(&cliFlags.RemoveDBUnpublished, "remove-db-unpublished", false, "Remove videos from the database that are marked as published but aren't really published")
cmd.Flags().BoolVar(&cliFlags.UpgradeMetadata, "upgrade-metadata", false, "Upgrade videos if they're on the old metadata version")
cmd.Flags().BoolVar(&cliFlags.DisableTransfers, "no-transfers", false, "Skips the transferring process of videos, channels and supports")
cmd.Flags().BoolVar(&cliFlags.QuickSync, "quick", false, "Look up only the last 50 videos from youtube")
cmd.Flags().StringVar(&cliFlags.Status, "status", "", "Specify which queue to pull from. Overrides --update")
cmd.Flags().StringVar(&cliFlags.SecondaryStatus, "status2", "", "Specify which secondary queue to pull from.")
cmd.Flags().StringVar(&cliFlags.ChannelID, "channelID", "", "If specified, only this channel will be synced.")
cmd.Flags().Int64Var(&cliFlags.SyncFrom, "after", time.Unix(0, 0).Unix(), "Specify from when to pull jobs [Unix time](Default: 0)")
cmd.Flags().Int64Var(&cliFlags.SyncUntil, "before", time.Now().AddDate(1, 0, 0).Unix(), "Specify until when to pull jobs [Unix time](Default: current Unix time)")
cmd.Flags().IntVar(&cliFlags.ConcurrentJobs, "concurrent-jobs", 1, "how many jobs to process concurrently")
cmd.Flags().IntVar(&cliFlags.VideosLimit, "videos-limit", 0, "how many videos to process per channel (leave 0 for automatic detection)")
cmd.Flags().IntVar(&cliFlags.MaxVideoSize, "max-size", 2048, "Maximum video size to process (in MB)")
cmd.Flags().IntVar(&maxVideoLength, "max-length", 2, "Maximum video length to process (in hours)")
if err := cmd.Execute(); err != nil {
fmt.Println(err)
os.Exit(1)
}
}
func ytSync(cmd *cobra.Command, args []string) {
err := configs.Init("./config.json")
if err != nil {
log.Fatalf("could not parse configuration file: %s", errors.FullTrace(err))
}
if configs.Configuration.SlackToken == "" {
log.Error("A slack token was not present in the config! Slack messages disabled!")
} else {
util.InitSlack(configs.Configuration.SlackToken, configs.Configuration.SlackChannel, configs.Configuration.GetHostname())
}
if cliFlags.Status != "" && !util.InSlice(cliFlags.Status, shared.SyncStatuses) {
log.Errorf("status must be one of the following: %v\n", shared.SyncStatuses)
return
}
if cliFlags.MaxTries < 1 {
log.Errorln("setting --max-tries less than 1 doesn't make sense")
return
}
if cliFlags.Limit < 0 {
log.Errorln("setting --limit less than 0 (unlimited) doesn't make sense")
return
}
cliFlags.MaxVideoLength = time.Duration(maxVideoLength) * time.Hour
if configs.Configuration.InternalApisEndpoint == "" {
log.Errorln("An Internal APIs Endpoint was not defined")
return
}
if configs.Configuration.InternalApisAuthToken == "" {
log.Errorln("An Internal APIs auth token was not defined")
return
}
if configs.Configuration.WalletS3Config.ID == "" || configs.Configuration.WalletS3Config.Region == "" || configs.Configuration.WalletS3Config.Bucket == "" || configs.Configuration.WalletS3Config.Secret == "" || configs.Configuration.WalletS3Config.Endpoint == "" {
log.Errorln("Wallet S3 configuration is incomplete")
return
}
if configs.Configuration.BlockchaindbS3Config.ID == "" || configs.Configuration.BlockchaindbS3Config.Region == "" || configs.Configuration.BlockchaindbS3Config.Bucket == "" || configs.Configuration.BlockchaindbS3Config.Secret == "" || configs.Configuration.BlockchaindbS3Config.Endpoint == "" {
log.Errorln("Blockchain DBs S3 configuration is incomplete")
return
}
if configs.Configuration.LbrycrdString == "" {
log.Infoln("Using default (local) lbrycrd instance. Set lbrycrd_string if you want to use something else")
}
blobsDir := ytUtils.GetBlobsDir()
sm := manager.NewSyncManager(
cliFlags,
blobsDir,
)
err = sm.Start()
if err != nil {
ytUtils.SendErrorToSlack(errors.FullTrace(err))
}
ytUtils.SendInfoToSlack("Syncing process terminated!")
} }


@ -8,7 +8,6 @@ import (
"time" "time"
"github.com/lbryio/ytsync/v5/blobs_reflector" "github.com/lbryio/ytsync/v5/blobs_reflector"
"github.com/lbryio/ytsync/v5/configs"
"github.com/lbryio/ytsync/v5/ip_manager" "github.com/lbryio/ytsync/v5/ip_manager"
"github.com/lbryio/ytsync/v5/namer" "github.com/lbryio/ytsync/v5/namer"
"github.com/lbryio/ytsync/v5/sdk" "github.com/lbryio/ytsync/v5/sdk"
@ -25,17 +24,19 @@ type SyncManager struct {
CliFlags shared.SyncFlags CliFlags shared.SyncFlags
ApiConfig *sdk.APIConfig ApiConfig *sdk.APIConfig
LbrycrdDsn string LbrycrdDsn string
AwsConfigs *shared.AwsConfigs
blobsDir string blobsDir string
channelsToSync []Sync channelsToSync []Sync
} }
func NewSyncManager(cliFlags shared.SyncFlags, blobsDir string) *SyncManager { func NewSyncManager(cliFlags shared.SyncFlags, blobsDir, lbrycrdDsn string, awsConfigs *shared.AwsConfigs, apiConfig *sdk.APIConfig) *SyncManager {
return &SyncManager{ return &SyncManager{
CliFlags: cliFlags, CliFlags: cliFlags,
blobsDir: blobsDir, blobsDir: blobsDir,
LbrycrdDsn: configs.Configuration.LbrycrdString, LbrycrdDsn: lbrycrdDsn,
ApiConfig: sdk.GetAPIsConfigs(), AwsConfigs: awsConfigs,
ApiConfig: apiConfig,
} }
} }
func (s *SyncManager) enqueueChannel(channel *shared.YoutubeChannel) { func (s *SyncManager) enqueueChannel(channel *shared.YoutubeChannel) {
@ -57,9 +58,12 @@ func (s *SyncManager) Start() error {
} }
} }
var lastChannelProcessed string var (
var secondLastChannelProcessed string lastChannelProcessed string
syncCount := 0 secondLastChannelProcessed string
syncCount int
)
for { for {
s.channelsToSync = make([]Sync, 0, 10) // reset sync queue s.channelsToSync = make([]Sync, 0, 10) // reset sync queue
err := s.checkUsedSpace() err := s.checkUsedSpace()
@ -107,10 +111,12 @@ func (s *SyncManager) Start() error {
log.Infof("Drained the \"%s\" queue", q) log.Infof("Drained the \"%s\" queue", q)
} }
} }
if len(s.channelsToSync) == 0 { if len(s.channelsToSync) == 0 {
log.Infoln("No channels to sync. Pausing 5 minutes!") log.Infoln("No channels to sync. Pausing 5 minutes!")
time.Sleep(5 * time.Minute) time.Sleep(5 * time.Minute)
} }
for _, sync := range s.channelsToSync { for _, sync := range s.channelsToSync {
if lastChannelProcessed == sync.DbChannelData.ChannelId && secondLastChannelProcessed == lastChannelProcessed { if lastChannelProcessed == sync.DbChannelData.ChannelId && secondLastChannelProcessed == lastChannelProcessed {
util.SendToSlack("We just killed a sync for %s to stop looping! (%s)", sync.DbChannelData.DesiredChannelName, sync.DbChannelData.ChannelId) util.SendToSlack("We just killed a sync for %s to stop looping! (%s)", sync.DbChannelData.DesiredChannelName, sync.DbChannelData.ChannelId)
@ -138,7 +144,7 @@ func (s *SyncManager) Start() error {
"WALLET HAS NOT BEEN MOVED TO THE WALLET BACKUP DIR", "WALLET HAS NOT BEEN MOVED TO THE WALLET BACKUP DIR",
"NotEnoughFunds", "NotEnoughFunds",
"no space left on device", "no space left on device",
"there was a problem uploading the wallet", "failure uploading wallet",
"the channel in the wallet is different than the channel in the database", "the channel in the wallet is different than the channel in the database",
"this channel does not belong to this wallet!", "this channel does not belong to this wallet!",
"You already have a stream claim published under the name", "You already have a stream claim published under the name",
@ -173,6 +179,7 @@ func (s *SyncManager) Start() error {
break break
} }
} }
return nil return nil
} }


@ -3,13 +3,6 @@ package manager
import ( import (
"os" "os"
"path/filepath" "path/filepath"
"strings"
"time"
"github.com/lbryio/ytsync/v5/configs"
"github.com/lbryio/ytsync/v5/util"
"github.com/lbryio/lbry.go/v2/extras/errors"
"github.com/aws/aws-sdk-go/aws" "github.com/aws/aws-sdk-go/aws"
"github.com/aws/aws-sdk-go/aws/awserr" "github.com/aws/aws-sdk-go/aws/awserr"
@ -17,21 +10,24 @@ import (
"github.com/aws/aws-sdk-go/service/s3" "github.com/aws/aws-sdk-go/service/s3"
"github.com/aws/aws-sdk-go/service/s3/s3manager" "github.com/aws/aws-sdk-go/service/s3/s3manager"
log "github.com/sirupsen/logrus" log "github.com/sirupsen/logrus"
"github.com/lbryio/lbry.go/v2/extras/errors"
logUtils "github.com/lbryio/ytsync/v5/util"
) )
func (s *Sync) getS3Downloader(config *aws.Config) (*s3manager.Downloader, error) { func (s *Sync) getS3Downloader() (*s3manager.Downloader, error) {
s3Session, err := session.NewSession(config) s3Session, err := session.NewSession(s.Manager.AwsConfigs.GetS3AWSConfig())
if err != nil { if err != nil {
return nil, errors.Prefix("error starting session", err) return nil, errors.Prefix("error starting session: ", err)
} }
downloader := s3manager.NewDownloader(s3Session) downloader := s3manager.NewDownloader(s3Session)
return downloader, nil return downloader, nil
} }
func (s *Sync) getS3Uploader() (*s3manager.Uploader, error) {
func (s *Sync) getS3Uploader(config *aws.Config) (*s3manager.Uploader, error) { s3Session, err := session.NewSession(s.Manager.AwsConfigs.GetS3AWSConfig())
s3Session, err := session.NewSession(config)
if err != nil { if err != nil {
return nil, errors.Prefix("error starting session", err) return nil, errors.Prefix("error starting session: ", err)
} }
uploader := s3manager.NewUploader(s3Session) uploader := s3manager.NewUploader(s3Session)
return uploader, nil return uploader, nil
@ -42,18 +38,18 @@ func (s *Sync) downloadWallet() error {
if err != nil { if err != nil {
return errors.Err(err) return errors.Err(err)
} }
downloader, err := s.getS3Downloader(configs.Configuration.WalletS3Config.GetS3AWSConfig()) downloader, err := s.getS3Downloader()
if err != nil { if err != nil {
return err return err
} }
out, err := os.Create(defaultTempWalletDir) out, err := os.Create(defaultTempWalletDir)
if err != nil { if err != nil {
return errors.Prefix("error creating temp wallet", err) return errors.Prefix("error creating temp wallet: ", err)
} }
defer out.Close() defer out.Close()
bytesWritten, err := downloader.Download(out, &s3.GetObjectInput{ bytesWritten, err := downloader.Download(out, &s3.GetObjectInput{
Bucket: aws.String(configs.Configuration.WalletS3Config.Bucket), Bucket: aws.String(s.Manager.AwsConfigs.AwsS3Bucket),
Key: key, Key: key,
}) })
if err != nil { if err != nil {
@ -78,21 +74,21 @@ func (s *Sync) downloadWallet() error {
err = os.Rename(defaultTempWalletDir, defaultWalletDir) err = os.Rename(defaultTempWalletDir, defaultWalletDir)
if err != nil { if err != nil {
return errors.Prefix("error replacing temp wallet for default wallet", err) return errors.Prefix("error replacing temp wallet for default wallet: ", err)
} }
return nil return nil
} }
func (s *Sync) downloadBlockchainDB() error { func (s *Sync) downloadBlockchainDB() error {
if util.IsRegTest() { if logUtils.IsRegTest() {
return nil // tests fail if we re-use the same blockchain DB return nil // tests fail if we re-use the same blockchain DB
} }
defaultBDBPath, defaultTempBDBPath, key, err := s.getBlockchainDBPaths() defaultBDBDir, defaultTempBDBDir, key, err := s.getBlockchainDBPaths()
if err != nil { if err != nil {
return errors.Err(err) return errors.Err(err)
} }
files, err := filepath.Glob(defaultBDBPath + "*") files, err := filepath.Glob(defaultBDBDir + "*")
if err != nil { if err != nil {
return errors.Err(err) return errors.Err(err)
} }
@ -105,18 +101,18 @@ func (s *Sync) downloadBlockchainDB() error {
if s.DbChannelData.WipeDB { if s.DbChannelData.WipeDB {
return nil return nil
} }
downloader, err := s.getS3Downloader(configs.Configuration.BlockchaindbS3Config.GetS3AWSConfig()) downloader, err := s.getS3Downloader()
if err != nil { if err != nil {
return errors.Err(err) return errors.Err(err)
} }
out, err := os.Create(defaultTempBDBPath) out, err := os.Create(defaultTempBDBDir)
if err != nil { if err != nil {
return errors.Prefix("error creating temp blockchain DB file", err) return errors.Prefix("error creating temp wallet: ", err)
} }
defer out.Close() defer out.Close()
bytesWritten, err := downloader.Download(out, &s3.GetObjectInput{ bytesWritten, err := downloader.Download(out, &s3.GetObjectInput{
Bucket: aws.String(configs.Configuration.BlockchaindbS3Config.Bucket), Bucket: aws.String(s.Manager.AwsConfigs.AwsS3Bucket),
Key: key, Key: key,
}) })
if err != nil { if err != nil {
@ -139,16 +135,11 @@ func (s *Sync) downloadBlockchainDB() error {
return errors.Err("zero bytes written") return errors.Err("zero bytes written")
} }
blockchainDbDir := strings.Replace(defaultBDBPath, "blockchain.db", "", -1) err = os.Rename(defaultTempBDBDir, defaultBDBDir)
err = util.Untar(defaultTempBDBPath, blockchainDbDir)
if err != nil { if err != nil {
return errors.Prefix("error extracting blockchain.db files", err) return errors.Prefix("error replacing temp blockchain.db for default blockchain.db: ", err)
} }
err = os.Remove(defaultTempBDBPath) log.Printf("blockchain.db downloaded to %s", defaultBDBDir)
if err != nil {
return errors.Err(err)
}
log.Printf("blockchain.db data downloaded and extracted to %s", blockchainDbDir)
return nil return nil
} }
@ -156,7 +147,7 @@ func (s *Sync) getWalletPaths() (defaultWallet, tempWallet string, key *string,
defaultWallet = os.Getenv("HOME") + "/.lbryum/wallets/default_wallet" defaultWallet = os.Getenv("HOME") + "/.lbryum/wallets/default_wallet"
tempWallet = os.Getenv("HOME") + "/.lbryum/wallets/tmp_wallet" tempWallet = os.Getenv("HOME") + "/.lbryum/wallets/tmp_wallet"
key = aws.String("/wallets/" + s.DbChannelData.ChannelId) key = aws.String("/wallets/" + s.DbChannelData.ChannelId)
if util.IsRegTest() { if logUtils.IsRegTest() {
defaultWallet = os.Getenv("HOME") + "/.lbryum_regtest/wallets/default_wallet" defaultWallet = os.Getenv("HOME") + "/.lbryum_regtest/wallets/default_wallet"
tempWallet = os.Getenv("HOME") + "/.lbryum_regtest/wallets/tmp_wallet" tempWallet = os.Getenv("HOME") + "/.lbryum_regtest/wallets/tmp_wallet"
key = aws.String("/regtest/" + s.DbChannelData.ChannelId) key = aws.String("/regtest/" + s.DbChannelData.ChannelId)
@ -177,27 +168,27 @@ func (s *Sync) getWalletPaths() (defaultWallet, tempWallet string, key *string,
func (s *Sync) getBlockchainDBPaths() (defaultDB, tempDB string, key *string, err error) { func (s *Sync) getBlockchainDBPaths() (defaultDB, tempDB string, key *string, err error) {
lbryumDir := os.Getenv("LBRYUM_DIR") lbryumDir := os.Getenv("LBRYUM_DIR")
if lbryumDir == "" { if lbryumDir == "" {
if util.IsRegTest() { if logUtils.IsRegTest() {
lbryumDir = os.Getenv("HOME") + "/.lbryum_regtest" lbryumDir = os.Getenv("HOME") + "/.lbryum_regtest"
} else { } else {
lbryumDir = os.Getenv("HOME") + "/.lbryum" lbryumDir = os.Getenv("HOME") + "/.lbryum"
} }
} }
defaultDB = lbryumDir + "/lbc_mainnet/blockchain.db" defaultDB = lbryumDir + "/lbc_mainnet/blockchain.db"
tempDB = lbryumDir + "/lbc_mainnet/tmp_blockchain.tar" tempDB = lbryumDir + "/lbc_mainnet/tmp_blockchain.db"
key = aws.String("/blockchain_dbs/" + s.DbChannelData.ChannelId + ".tar") key = aws.String("/blockchain_dbs/" + s.DbChannelData.ChannelId)
if util.IsRegTest() { if logUtils.IsRegTest() {
defaultDB = lbryumDir + "/lbc_regtest/blockchain.db" defaultDB = lbryumDir + "/lbc_regtest/blockchain.db"
tempDB = lbryumDir + "/lbc_regtest/tmp_blockchain.tar" tempDB = lbryumDir + "/lbc_regtest/tmp_blockchain.db"
key = aws.String("/regtest_dbs/" + s.DbChannelData.ChannelId + ".tar") key = aws.String("/regtest_dbs/" + s.DbChannelData.ChannelId)
} }
return return
} }
func (s *Sync) uploadWallet() error { func (s *Sync) uploadWallet() error {
defaultWalletDir := util.GetDefaultWalletPath() defaultWalletDir := logUtils.GetDefaultWalletPath()
key := aws.String("/wallets/" + s.DbChannelData.ChannelId) key := aws.String("/wallets/" + s.DbChannelData.ChannelId)
if util.IsRegTest() { if logUtils.IsRegTest() {
key = aws.String("/regtest/" + s.DbChannelData.ChannelId) key = aws.String("/regtest/" + s.DbChannelData.ChannelId)
} }
@ -205,7 +196,7 @@ func (s *Sync) uploadWallet() error {
return errors.Err("default_wallet does not exist") return errors.Err("default_wallet does not exist")
} }
uploader, err := s.getS3Uploader(configs.Configuration.WalletS3Config.GetS3AWSConfig()) uploader, err := s.getS3Uploader()
if err != nil { if err != nil {
return err return err
} }
@ -216,22 +207,13 @@ func (s *Sync) uploadWallet() error {
} }
defer file.Close() defer file.Close()
start := time.Now()
for time.Since(start) < 30*time.Minute {
_, err = uploader.Upload(&s3manager.UploadInput{ _, err = uploader.Upload(&s3manager.UploadInput{
Bucket: aws.String(configs.Configuration.WalletS3Config.Bucket), Bucket: aws.String(s.Manager.AwsConfigs.AwsS3Bucket),
Key: key, Key: key,
Body: file, Body: file,
}) })
if err != nil { if err != nil {
time.Sleep(30 * time.Second) return err
continue
}
break
}
if err != nil {
return errors.Prefix("there was a problem uploading the wallet to S3", errors.Err(err))
} }
log.Println("wallet uploaded to S3") log.Println("wallet uploaded to S3")
@ -247,39 +229,26 @@ func (s *Sync) uploadBlockchainDB() error {
if _, err := os.Stat(defaultBDBDir); os.IsNotExist(err) { if _, err := os.Stat(defaultBDBDir); os.IsNotExist(err) {
return errors.Err("blockchain.db does not exist") return errors.Err("blockchain.db does not exist")
} }
files, err := filepath.Glob(defaultBDBDir + "*")
if err != nil { uploader, err := s.getS3Uploader()
return errors.Err(err)
}
tarPath := strings.Replace(defaultBDBDir, "blockchain.db", "", -1) + s.DbChannelData.ChannelId + ".tar"
err = util.CreateTarball(tarPath, files)
if err != nil { if err != nil {
return err return err
} }
uploader, err := s.getS3Uploader(configs.Configuration.BlockchaindbS3Config.GetS3AWSConfig()) file, err := os.Open(defaultBDBDir)
if err != nil {
return err
}
file, err := os.Open(tarPath)
if err != nil { if err != nil {
return err return err
} }
defer file.Close() defer file.Close()
_, err = uploader.Upload(&s3manager.UploadInput{ _, err = uploader.Upload(&s3manager.UploadInput{
Bucket: aws.String(configs.Configuration.BlockchaindbS3Config.Bucket), Bucket: aws.String(s.Manager.AwsConfigs.AwsS3Bucket),
Key: key, Key: key,
Body: file, Body: file,
}) })
if err != nil { if err != nil {
return err return err
} }
log.Println("blockchain.db files uploaded to S3") log.Println("blockchain.db uploaded to S3")
err = os.Remove(tarPath)
if err != nil {
return errors.Err(err)
}
return os.Remove(defaultBDBDir) return os.Remove(defaultBDBDir)
} }


@ -103,9 +103,6 @@ func (s *Sync) walletSetup() error {
videosOnYoutube = s.Manager.CliFlags.VideosToSync(s.DbChannelData.TotalSubscribers) videosOnYoutube = s.Manager.CliFlags.VideosToSync(s.DbChannelData.TotalSubscribers)
} }
unallocatedVideos := videosOnYoutube - (publishedCount + failedCount) unallocatedVideos := videosOnYoutube - (publishedCount + failedCount)
if unallocatedVideos < 0 {
unallocatedVideos = 0
}
channelFee := channelClaimAmount channelFee := channelClaimAmount
channelAlreadyClaimed := s.DbChannelData.ChannelClaimID != "" channelAlreadyClaimed := s.DbChannelData.ChannelClaimID != ""
if channelAlreadyClaimed { if channelAlreadyClaimed {
@ -113,7 +110,7 @@ func (s *Sync) walletSetup() error {
} }
requiredBalance := float64(unallocatedVideos)*(publishAmount+estimatedMaxTxFee) + channelFee requiredBalance := float64(unallocatedVideos)*(publishAmount+estimatedMaxTxFee) + channelFee
if s.Manager.CliFlags.UpgradeMetadata { if s.Manager.CliFlags.UpgradeMetadata {
requiredBalance += float64(notUpgradedCount) * estimatedMaxTxFee requiredBalance += float64(notUpgradedCount) * 0.001
} }
refillAmount := 0.0 refillAmount := 0.0
@ -130,12 +127,6 @@ func (s *Sync) walletSetup() error {
if err != nil { if err != nil {
return errors.Err(err) return errors.Err(err)
} }
} else if balance > requiredBalance {
extraLBC := balance - requiredBalance
if extraLBC > 5 {
sendBackAmount := extraLBC - 1
logUtils.SendInfoToSlack("channel %s has %.1f credits which is %.1f more than it requires (%.1f). We should send at least %.1f that back.", s.DbChannelData.ChannelId, balance, extraLBC, requiredBalance, sendBackAmount)
}
} }
claimAddress, err := s.daemon.AddressList(nil, nil, 1, 20) claimAddress, err := s.daemon.AddressList(nil, nil, 1, 20)
@ -320,12 +311,12 @@ func (s *Sync) waitForNewBlock() error {
func (s *Sync) GenerateRegtestBlock() error { func (s *Sync) GenerateRegtestBlock() error {
lbrycrd, err := logUtils.GetLbrycrdClient(s.Manager.LbrycrdDsn) lbrycrd, err := logUtils.GetLbrycrdClient(s.Manager.LbrycrdDsn)
if err != nil { if err != nil {
return errors.Prefix("error getting lbrycrd client", err) return errors.Prefix("error getting lbrycrd client: ", err)
} }
txs, err := lbrycrd.Generate(1) txs, err := lbrycrd.Generate(1)
if err != nil { if err != nil {
return errors.Prefix("error generating new block", err) return errors.Prefix("error generating new block: ", err)
} }
for _, tx := range txs { for _, tx := range txs {
@ -375,12 +366,14 @@ func (s *Sync) ensureChannelOwnership() error {
channelUsesOldMetadata := false channelUsesOldMetadata := false
if channelToUse != nil { if channelToUse != nil {
channelUsesOldMetadata = channelToUse.Value.GetThumbnail() == nil || (len(channelToUse.Value.GetLanguages()) == 0 && s.DbChannelData.Language != "") channelUsesOldMetadata = channelToUse.Value.GetThumbnail() == nil
if !channelUsesOldMetadata { if !channelUsesOldMetadata {
return nil return nil
} }
} }
channelBidAmount := channelClaimAmount
balanceResp, err := s.daemon.AccountBalance(nil) balanceResp, err := s.daemon.AccountBalance(nil)
if err != nil { if err != nil {
return err return err
@ -392,8 +385,8 @@ func (s *Sync) ensureChannelOwnership() error {
return errors.Err(err) return errors.Err(err)
} }
if balance.LessThan(decimal.NewFromFloat(channelClaimAmount)) { if balance.LessThan(decimal.NewFromFloat(channelBidAmount)) {
err = s.addCredits(channelClaimAmount + estimatedMaxTxFee*3) err = s.addCredits(channelBidAmount + 0.3)
if err != nil { if err != nil {
return err return err
} }
@ -414,7 +407,7 @@ func (s *Sync) ensureChannelOwnership() error {
} }
thumbnail := channelInfo.Header.C4TabbedHeaderRenderer.Avatar.Thumbnails[len(channelInfo.Header.C4TabbedHeaderRenderer.Avatar.Thumbnails)-1].URL thumbnail := channelInfo.Header.C4TabbedHeaderRenderer.Avatar.Thumbnails[len(channelInfo.Header.C4TabbedHeaderRenderer.Avatar.Thumbnails)-1].URL
thumbnailURL, err := thumbs.MirrorThumbnail(thumbnail, s.DbChannelData.ChannelId) thumbnailURL, err := thumbs.MirrorThumbnail(thumbnail, s.DbChannelData.ChannelId, *s.Manager.AwsConfigs.GetS3AWSConfig())
if err != nil { if err != nil {
return err return err
} }
@ -423,6 +416,7 @@ func (s *Sync) ensureChannelOwnership() error {
if channelInfo.Header.C4TabbedHeaderRenderer.Banner.Thumbnails != nil { if channelInfo.Header.C4TabbedHeaderRenderer.Banner.Thumbnails != nil {
bURL, err := thumbs.MirrorThumbnail(channelInfo.Header.C4TabbedHeaderRenderer.Banner.Thumbnails[len(channelInfo.Header.C4TabbedHeaderRenderer.Banner.Thumbnails)-1].URL, bURL, err := thumbs.MirrorThumbnail(channelInfo.Header.C4TabbedHeaderRenderer.Banner.Thumbnails[len(channelInfo.Header.C4TabbedHeaderRenderer.Banner.Thumbnails)-1].URL,
"banner-"+s.DbChannelData.ChannelId, "banner-"+s.DbChannelData.ChannelId,
*s.Manager.AwsConfigs.GetS3AWSConfig(),
) )
if err != nil { if err != nil {
return err return err
@ -431,16 +425,18 @@ func (s *Sync) ensureChannelOwnership() error {
} }
var languages []string = nil var languages []string = nil
if s.DbChannelData.Language != "" { //we don't have this data without the API
languages = []string{s.DbChannelData.Language} //if channelInfo.DefaultLanguage != "" {
} // if channelInfo.DefaultLanguage == "iw" {
// channelInfo.DefaultLanguage = "he"
// }
// languages = []string{channelInfo.DefaultLanguage}
//}
var locations []jsonrpc.Location = nil var locations []jsonrpc.Location = nil
if channelInfo.Topbar.DesktopTopbarRenderer.CountryCode != "" { if channelInfo.Topbar.DesktopTopbarRenderer.CountryCode != "" {
locations = []jsonrpc.Location{{Country: &channelInfo.Topbar.DesktopTopbarRenderer.CountryCode}} locations = []jsonrpc.Location{{Country: &channelInfo.Topbar.DesktopTopbarRenderer.CountryCode}}
} }
var c *jsonrpc.TransactionSummary var c *jsonrpc.TransactionSummary
var recoveredChannelClaimID string
claimCreateOptions := jsonrpc.ClaimCreateOptions{ claimCreateOptions := jsonrpc.ClaimCreateOptions{
Title: &channelInfo.Microformat.MicroformatDataRenderer.Title, Title: &channelInfo.Microformat.MicroformatDataRenderer.Title,
Description: &channelInfo.Metadata.ChannelMetadataRenderer.Description, Description: &channelInfo.Metadata.ChannelMetadataRenderer.Description,
@ -450,20 +446,12 @@ func (s *Sync) ensureChannelOwnership() error {
ThumbnailURL: &thumbnailURL, ThumbnailURL: &thumbnailURL,
} }
if channelUsesOldMetadata { if channelUsesOldMetadata {
da, err := s.getDefaultAccount()
if err != nil {
return err
}
if s.DbChannelData.TransferState <= 1 { if s.DbChannelData.TransferState <= 1 {
c, err = s.daemon.ChannelUpdate(s.DbChannelData.ChannelClaimID, jsonrpc.ChannelUpdateOptions{ c, err = s.daemon.ChannelUpdate(s.DbChannelData.ChannelClaimID, jsonrpc.ChannelUpdateOptions{
ClearTags: util.PtrToBool(true), ClearTags: util.PtrToBool(true),
ClearLocations: util.PtrToBool(true), ClearLocations: util.PtrToBool(true),
ClearLanguages: util.PtrToBool(true), ClearLanguages: util.PtrToBool(true),
ChannelCreateOptions: jsonrpc.ChannelCreateOptions{ ChannelCreateOptions: jsonrpc.ChannelCreateOptions{
AccountID: &da,
FundingAccountIDs: []string{
da,
},
ClaimCreateOptions: claimCreateOptions, ClaimCreateOptions: claimCreateOptions,
CoverURL: bannerURL, CoverURL: bannerURL,
}, },
@ -473,50 +461,20 @@ func (s *Sync) ensureChannelOwnership() error {
return nil return nil
} }
} else { } else {
c, err = s.daemon.ChannelCreate(s.DbChannelData.DesiredChannelName, channelClaimAmount, jsonrpc.ChannelCreateOptions{ c, err = s.daemon.ChannelCreate(s.DbChannelData.DesiredChannelName, channelBidAmount, jsonrpc.ChannelCreateOptions{
ClaimCreateOptions: claimCreateOptions, ClaimCreateOptions: claimCreateOptions,
CoverURL: bannerURL, CoverURL: bannerURL,
}) })
if err != nil {
claimId, err2 := s.getChannelClaimIDForTimedOutCreation()
if err2 != nil {
err = errors.Prefix(err2.Error(), err)
} else {
recoveredChannelClaimID = claimId
}
}
} }
if err != nil { if err != nil {
return err return err
} }
if recoveredChannelClaimID != "" {
s.DbChannelData.ChannelClaimID = recoveredChannelClaimID
} else {
s.DbChannelData.ChannelClaimID = c.Outputs[0].ClaimID s.DbChannelData.ChannelClaimID = c.Outputs[0].ClaimID
}
return s.Manager.ApiConfig.SetChannelClaimID(s.DbChannelData.ChannelId, s.DbChannelData.ChannelClaimID) return s.Manager.ApiConfig.SetChannelClaimID(s.DbChannelData.ChannelId, s.DbChannelData.ChannelClaimID)
} }
//getChannelClaimIDForTimedOutCreation is a raw function that returns the only channel that exists in the wallet
// this is used because the SDK sucks and can't figure out when to return when creating a claim...
func (s *Sync) getChannelClaimIDForTimedOutCreation() (string, error) {
channels, err := s.daemon.ChannelList(nil, 1, 500, nil)
if err != nil {
return "", err
} else if channels == nil {
return "", errors.Err("no channel response")
}
if len((*channels).Items) != 1 {
return "", errors.Err("more than one channel found when trying to recover from SDK failure in creating the channel")
}
desiredChannel := (*channels).Items[0]
if desiredChannel.Name != s.DbChannelData.DesiredChannelName {
return "", errors.Err("the channel found in the wallet has a different name than the one we expected")
}
return desiredChannel.ClaimID, nil
}
func (s *Sync) addCredits(amountToAdd float64) error { func (s *Sync) addCredits(amountToAdd float64) error {
start := time.Now() start := time.Now()
defer func(start time.Time) { defer func(start time.Time) {


@ -241,7 +241,7 @@ func transferVideos(s *Sync) error {
}, },
}, },
}, },
Bid: util.PtrToString(fmt.Sprintf("%.5f", publishAmount/2.)), Bid: util.PtrToString("0.005"), // Todo - Dont hardcode
} }
videoStatus := shared.VideoStatus{ videoStatus := shared.VideoStatus{
ChannelID: s.DbChannelData.ChannelId, ChannelID: s.DbChannelData.ChannelId,
@ -293,7 +293,7 @@ func (s *Sync) streamUpdate(ui *updateInfo) error {
timing.TimedComponent("transferStreamUpdate").Add(time.Since(start)) timing.TimedComponent("transferStreamUpdate").Add(time.Since(start))
if updateError != nil { if updateError != nil {
ui.videoStatus.FailureReason = updateError.Error() ui.videoStatus.FailureReason = updateError.Error()
ui.videoStatus.Status = shared.VideoStatusTransferFailed ui.videoStatus.Status = shared.VideoStatusTranferFailed
ui.videoStatus.IsTransferred = util.PtrToBool(false) ui.videoStatus.IsTransferred = util.PtrToBool(false)
} else { } else {
ui.videoStatus.IsTransferred = util.PtrToBool(len(result.Outputs) != 0) ui.videoStatus.IsTransferred = util.PtrToBool(len(result.Outputs) != 0)


@ -33,10 +33,11 @@ import (
const ( const (
channelClaimAmount = 0.01 channelClaimAmount = 0.01
estimatedMaxTxFee = 0.0015 estimatedMaxTxFee = 0.1
minimumAccountBalance = 1.0 minimumAccountBalance = 1.0
minimumRefillAmount = 1 minimumRefillAmount = 1
publishAmount = 0.002 publishAmount = 0.01
maxReasonLength = 500
) )
// Sync stores the options that control how syncing happens // Sync stores the options that control how syncing happens
@ -285,8 +286,6 @@ func (s *Sync) setChannelTerminationStatus(e *error) {
"interrupted during daemon startup", "interrupted during daemon startup",
"interrupted by user", "interrupted by user",
"use --skip-space-check to ignore", "use --skip-space-check to ignore",
"failure uploading blockchain DB",
"default_wallet already exists",
} }
dbWipeConditions := []string{ dbWipeConditions := []string{
"Missing inputs", "Missing inputs",
@ -336,7 +335,7 @@ func (s *Sync) waitForDaemonStart() error {
} }
func (s *Sync) stopAndUploadWallet(e *error) { func (s *Sync) stopAndUploadWallet(e *error) {
log.Println("Stopping daemon") log.Printf("Stopping daemon")
shutdownErr := logUtils.StopDaemon() shutdownErr := logUtils.StopDaemon()
if shutdownErr != nil { if shutdownErr != nil {
logShutdownError(shutdownErr) logShutdownError(shutdownErr)
@ -351,17 +350,17 @@ func (s *Sync) stopAndUploadWallet(e *error) {
err := s.uploadWallet() err := s.uploadWallet()
if err != nil { if err != nil {
if *e == nil { if *e == nil {
*e = err e = &err
} else { } else {
*e = errors.Prefix(fmt.Sprintf("%s + original error", errors.FullTrace(err)), *e) *e = errors.Prefix("failure uploading wallet", *e)
} }
} }
err = s.uploadBlockchainDB() err = s.uploadBlockchainDB()
if err != nil { if err != nil {
if *e == nil { if *e == nil {
*e = err e = &err
} else { } else {
*e = errors.Prefix(fmt.Sprintf("failure uploading blockchain DB: %s + original error", errors.FullTrace(err)), *e) *e = errors.Prefix("failure uploading wallet", *e)
} }
} }
} }
@ -496,7 +495,7 @@ func (s *Sync) updateRemoteDB(claims []jsonrpc.Claim, ownClaims []jsonrpc.Claim)
claimMarkedUnpublished := claimInDatabase && !sv.Published claimMarkedUnpublished := claimInDatabase && !sv.Published
_, isOwnClaim := ownClaimsInfo[videoID] _, isOwnClaim := ownClaimsInfo[videoID]
transferred := !isOwnClaim || s.DbChannelData.TransferState == 3 transferred := !isOwnClaim || s.DbChannelData.TransferState == 3
transferStatusMismatch := claimInDatabase && sv.Transferred != transferred transferStatusMismatch := sv.Transferred != transferred
if metadataDiffers { if metadataDiffers {
log.Debugf("%s: Mismatch in database for metadata. DB: %d - Blockchain: %d", videoID, sv.MetadataVersion, chainInfo.MetadataVersion) log.Debugf("%s: Mismatch in database for metadata. DB: %d - Blockchain: %d", videoID, sv.MetadataVersion, chainInfo.MetadataVersion)
@ -557,11 +556,7 @@ func (s *Sync) updateRemoteDB(claims []jsonrpc.Claim, ownClaims []jsonrpc.Claim)
if sv.Transferred || sv.IsLbryFirst { if sv.Transferred || sv.IsLbryFirst {
_, ok := allClaimsInfo[vID] _, ok := allClaimsInfo[vID]
if !ok && sv.Published { if !ok && sv.Published {
searchResponse, err := s.daemon.ClaimSearch(jsonrpc.ClaimSearchArgs{ searchResponse, err := s.daemon.ClaimSearch(nil, &sv.ClaimID, nil, nil, 1, 20)
ClaimID: &sv.ClaimID,
Page: 1,
PageSize: 20,
})
if err != nil { if err != nil {
log.Error(err.Error()) log.Error(err.Error())
continue continue
@ -677,8 +672,7 @@ func (s *Sync) checkIntegrity() error {
if pubsOnWallet > pubsOnDB { //This case should never happen if pubsOnWallet > pubsOnDB { //This case should never happen
logUtils.SendInfoToSlack("We're claiming to have published %d videos but in reality we published %d (%s)", pubsOnDB, pubsOnWallet, s.DbChannelData.ChannelId) logUtils.SendInfoToSlack("We're claiming to have published %d videos but in reality we published %d (%s)", pubsOnDB, pubsOnWallet, s.DbChannelData.ChannelId)
//we never really done anything about those. it happens when a user updates the channel for a publish to another ytsync channel return errors.Err("not all published videos are in the database")
//return errors.Err("not all published videos are in the database")
} }
if pubsOnWallet < pubsOnDB { if pubsOnWallet < pubsOnDB {
logUtils.SendInfoToSlack("we're claiming to have published %d videos but we only published %d (%s)", pubsOnDB, pubsOnWallet, s.DbChannelData.ChannelId) logUtils.SendInfoToSlack("we're claiming to have published %d videos but we only published %d (%s)", pubsOnDB, pubsOnWallet, s.DbChannelData.ChannelId)
@ -867,8 +861,9 @@ func (s *Sync) enqueueYoutubeVideos() error {
return err return err
} }
videos, err := ytapi.GetVideosToSync(s.DbChannelData.ChannelId, s.syncedVideos, s.Manager.CliFlags.QuickSync, s.Manager.CliFlags.VideosToSync(s.DbChannelData.TotalSubscribers), ytapi.VideoParams{ videos, err := ytapi.GetVideosToSync(s.Manager.ApiConfig, s.DbChannelData.ChannelId, s.syncedVideos, s.Manager.CliFlags.QuickSync, s.Manager.CliFlags.VideosToSync(s.DbChannelData.TotalSubscribers), ytapi.VideoParams{
VideoDir: s.videoDirectory, VideoDir: s.videoDirectory,
S3Config: *s.Manager.AwsConfigs.GetS3AWSConfig(),
Stopper: s.grp, Stopper: s.grp,
IPPool: ipPool, IPPool: ipPool,
}, s.DbChannelData.LastUploadedVideo) }, s.DbChannelData.LastUploadedVideo)


@ -1,17 +1,31 @@
package metrics package metrics
import ( import (
"github.com/lbryio/ytsync/v5/configs" "os"
"regexp"
"github.com/prometheus/client_golang/prometheus" "github.com/prometheus/client_golang/prometheus"
"github.com/prometheus/client_golang/prometheus/promauto" "github.com/prometheus/client_golang/prometheus/promauto"
log "github.com/sirupsen/logrus"
) )
var ( var (
Durations = promauto.NewHistogramVec(prometheus.HistogramOpts{ Durations = promauto.NewHistogramVec(prometheus.HistogramOpts{
Namespace: "ytsync", Namespace: "ytsync",
Subsystem: configs.Configuration.GetHostname(), Subsystem: getHostname(),
Name: "duration", Name: "duration",
Help: "The durations of the individual modules", Help: "The durations of the individual modules",
}, []string{"path"}) }, []string{"path"})
) )
func getHostname() string {
hostname, err := os.Hostname()
if err != nil {
hostname = "ytsync_unknown"
}
reg, err := regexp.Compile("[^a-zA-Z0-9_]+")
if err != nil {
log.Fatal(err)
}
return reg.ReplaceAllString(hostname, "_")
}


@ -13,7 +13,6 @@ import (
"github.com/lbryio/lbry.go/v2/extras/errors" "github.com/lbryio/lbry.go/v2/extras/errors"
"github.com/lbryio/lbry.go/v2/extras/null" "github.com/lbryio/lbry.go/v2/extras/null"
"github.com/lbryio/ytsync/v5/configs"
"github.com/lbryio/ytsync/v5/shared" "github.com/lbryio/ytsync/v5/shared"
"github.com/lbryio/ytsync/v5/util" "github.com/lbryio/ytsync/v5/util"
@ -26,24 +25,12 @@ const (
) )
type APIConfig struct { type APIConfig struct {
YoutubeAPIKey string
ApiURL string ApiURL string
ApiToken string ApiToken string
HostName string HostName string
} }
var instance *APIConfig
func GetAPIsConfigs() *APIConfig {
if instance == nil {
instance = &APIConfig{
ApiURL: configs.Configuration.InternalApisEndpoint,
ApiToken: configs.Configuration.InternalApisAuthToken,
HostName: configs.Configuration.GetHostname(),
}
}
return instance
}
func (a *APIConfig) FetchChannels(status string, cliFlags *shared.SyncFlags) ([]shared.YoutubeChannel, error) { func (a *APIConfig) FetchChannels(status string, cliFlags *shared.SyncFlags) ([]shared.YoutubeChannel, error) {
type apiJobsResponse struct { type apiJobsResponse struct {
Success bool `json:"success"` Success bool `json:"success"`
@ -61,10 +48,13 @@ func (a *APIConfig) FetchChannels(status string, cliFlags *shared.SyncFlags) ([]
"channel_id": {cliFlags.ChannelID}, "channel_id": {cliFlags.ChannelID},
}) })
if err != nil { if err != nil {
util.SendErrorToSlack("error while trying to call %s. Waiting to retry: %s", endpoint, err.Error()) if strings.Contains(err.Error(), "EOF") {
util.SendErrorToSlack("EOF error while trying to call %s. Waiting to retry", endpoint)
time.Sleep(30 * time.Second) time.Sleep(30 * time.Second)
return a.FetchChannels(status, cliFlags) return a.FetchChannels(status, cliFlags)
} }
return nil, errors.Err(err)
}
defer res.Body.Close() defer res.Body.Close()
body, _ := ioutil.ReadAll(res.Body) body, _ := ioutil.ReadAll(res.Body)
if res.StatusCode != http.StatusOK { if res.StatusCode != http.StatusOK {
@ -121,10 +111,13 @@ func (a *APIConfig) SetChannelCert(certHex string, channelID string) error {
"auth_token": {a.ApiToken}, "auth_token": {a.ApiToken},
}) })
if err != nil { if err != nil {
util.SendErrorToSlack("error while trying to call %s. Waiting to retry: %s", endpoint, err.Error()) if strings.Contains(err.Error(), "EOF") {
util.SendErrorToSlack("EOF error while trying to call %s. Waiting to retry", endpoint)
time.Sleep(30 * time.Second) time.Sleep(30 * time.Second)
return a.SetChannelCert(certHex, channelID) return a.SetChannelCert(certHex, channelID)
} }
return errors.Err(err)
}
defer res.Body.Close() defer res.Body.Close()
body, _ := ioutil.ReadAll(res.Body) body, _ := ioutil.ReadAll(res.Body)
if res.StatusCode != http.StatusOK { if res.StatusCode != http.StatusOK {
@ -166,10 +159,13 @@ func (a *APIConfig) SetChannelStatus(channelID string, status string, failureRea
} }
res, err := http.PostForm(endpoint, params) res, err := http.PostForm(endpoint, params)
if err != nil { if err != nil {
util.SendErrorToSlack("error while trying to call %s. Waiting to retry: %s", endpoint, err.Error()) if strings.Contains(err.Error(), "EOF") {
util.SendErrorToSlack("EOF error while trying to call %s. Waiting to retry", endpoint)
time.Sleep(30 * time.Second) time.Sleep(30 * time.Second)
return a.SetChannelStatus(channelID, status, failureReason, transferState) return a.SetChannelStatus(channelID, status, failureReason, transferState)
} }
return nil, nil, errors.Err(err)
}
defer res.Body.Close() defer res.Body.Close()
body, _ := ioutil.ReadAll(res.Body) body, _ := ioutil.ReadAll(res.Body)
if res.StatusCode >= http.StatusInternalServerError { if res.StatusCode >= http.StatusInternalServerError {
@ -213,10 +209,13 @@ func (a *APIConfig) SetChannelClaimID(channelID string, channelClaimID string) e
"channel_claim_id": {channelClaimID}, "channel_claim_id": {channelClaimID},
}) })
if err != nil { if err != nil {
util.SendErrorToSlack("error while trying to call %s. Waiting to retry: %s", endpoint, err.Error()) if strings.Contains(err.Error(), "EOF") {
util.SendErrorToSlack("EOF error while trying to call %s. Waiting to retry", endpoint)
time.Sleep(30 * time.Second) time.Sleep(30 * time.Second)
return a.SetChannelClaimID(channelID, channelClaimID) return a.SetChannelClaimID(channelID, channelClaimID)
} }
return errors.Err(err)
}
defer res.Body.Close() defer res.Body.Close()
body, _ := ioutil.ReadAll(res.Body) body, _ := ioutil.ReadAll(res.Body)
if res.StatusCode != http.StatusOK { if res.StatusCode != http.StatusOK {
@ -254,10 +253,13 @@ func (a *APIConfig) DeleteVideos(videos []string) error {
} }
res, err := http.PostForm(endpoint, vals) res, err := http.PostForm(endpoint, vals)
if err != nil { if err != nil {
util.SendErrorToSlack("error while trying to call %s. Waiting to retry: %s", endpoint, err.Error()) if strings.Contains(err.Error(), "EOF") {
util.SendErrorToSlack("EOF error while trying to call %s. Waiting to retry", endpoint)
time.Sleep(30 * time.Second) time.Sleep(30 * time.Second)
return a.DeleteVideos(videos) return a.DeleteVideos(videos)
} }
return errors.Err(err)
}
defer res.Body.Close() defer res.Body.Close()
body, _ := ioutil.ReadAll(res.Body) body, _ := ioutil.ReadAll(res.Body)
if res.StatusCode != http.StatusOK { if res.StatusCode != http.StatusOK {
@ -317,10 +319,13 @@ func (a *APIConfig) MarkVideoStatus(status shared.VideoStatus) error {
} }
res, err := http.PostForm(endpoint, vals) res, err := http.PostForm(endpoint, vals)
if err != nil { if err != nil {
util.SendErrorToSlack("error while trying to call %s. Waiting to retry: %s", endpoint, err.Error()) if strings.Contains(err.Error(), "EOF") {
util.SendErrorToSlack("EOF error while trying to call %s for %s. Waiting to retry", endpoint, status.ClaimName)
time.Sleep(30 * time.Second) time.Sleep(30 * time.Second)
return a.MarkVideoStatus(status) return a.MarkVideoStatus(status)
} }
return errors.Err(err)
}
defer res.Body.Close() defer res.Body.Close()
body, _ := ioutil.ReadAll(res.Body) body, _ := ioutil.ReadAll(res.Body)
if res.StatusCode != http.StatusOK { if res.StatusCode != http.StatusOK {
@ -356,10 +361,13 @@ func (a *APIConfig) VideoState(videoID string) (string, error) {
res, err := http.PostForm(endpoint, vals) res, err := http.PostForm(endpoint, vals)
if err != nil { if err != nil {
util.SendErrorToSlack("error while trying to call %s. Waiting to retry: %s", endpoint, err.Error()) if strings.Contains(err.Error(), "EOF") {
util.SendErrorToSlack("EOF error while trying to call %s. Waiting to retry", endpoint)
time.Sleep(30 * time.Second) time.Sleep(30 * time.Second)
return a.VideoState(videoID) return a.VideoState(videoID)
} }
return "", errors.Err(err)
}
defer res.Body.Close() defer res.Body.Close()
body, _ := ioutil.ReadAll(res.Body) body, _ := ioutil.ReadAll(res.Body)
if res.StatusCode == http.StatusNotFound { if res.StatusCode == http.StatusNotFound {
@ -407,10 +415,13 @@ func (a *APIConfig) GetReleasedDate(videoID string) (*VideoRelease, error) {
res, err := http.PostForm(endpoint, vals) res, err := http.PostForm(endpoint, vals)
if err != nil { if err != nil {
util.SendErrorToSlack("error while trying to call %s. Waiting to retry: %s", endpoint, err.Error()) if strings.Contains(err.Error(), "EOF") {
util.SendErrorToSlack("EOF error while trying to call %s. Waiting to retry", endpoint)
time.Sleep(30 * time.Second) time.Sleep(30 * time.Second)
return a.GetReleasedDate(videoID) return a.GetReleasedDate(videoID)
} }
return nil, errors.Err(err)
}
defer res.Body.Close() defer res.Body.Close()
body, _ := ioutil.ReadAll(res.Body) body, _ := ioutil.ReadAll(res.Body)
if res.StatusCode == http.StatusNotFound { if res.StatusCode == http.StatusNotFound {
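Every hunk in this file applies the same retry recipe: report to Slack, and when the transport error merely looks like a dropped connection (the error string contains "EOF"), sleep 30 seconds and call the same method again; any other error is returned wrapped. A minimal sketch of that pattern as a standalone helper, assuming only net/http, url.Values and the lbry.go errors package; the helper name is hypothetical and the real methods also post to Slack before sleeping:

package sdk

import (
	"io/ioutil"
	"net/http"
	"net/url"
	"strings"
	"time"

	"github.com/lbryio/lbry.go/v2/extras/errors"
)

// postFormWithEOFRetry illustrates the retry rule used by the API helpers above:
// EOF-looking transport errors pause 30s and retry, everything else bubbles up.
func postFormWithEOFRetry(endpoint string, vals url.Values) ([]byte, error) {
	res, err := http.PostForm(endpoint, vals)
	if err != nil {
		if strings.Contains(err.Error(), "EOF") {
			time.Sleep(30 * time.Second)
			return postFormWithEOFRetry(endpoint, vals)
		}
		return nil, errors.Err(err)
	}
	defer res.Body.Close()
	body, err := ioutil.ReadAll(res.Body)
	if err != nil {
		return nil, errors.Err(err)
	}
	return body, nil
}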

View file

@ -4,6 +4,8 @@ import (
"encoding/json" "encoding/json"
"time" "time"
"github.com/aws/aws-sdk-go/aws"
"github.com/aws/aws-sdk-go/aws/credentials"
"github.com/lbryio/lbry.go/v2/extras/errors" "github.com/lbryio/lbry.go/v2/extras/errors"
) )
@ -26,7 +28,6 @@ type YoutubeChannel struct {
SizeLimit int `json:"size_limit"` SizeLimit int `json:"size_limit"`
LastUploadedVideo string `json:"last_uploaded_video"` LastUploadedVideo string `json:"last_uploaded_video"`
WipeDB bool `json:"wipe_db"` WipeDB bool `json:"wipe_db"`
Language string `json:"language"`
} }
type PublishAddress struct { type PublishAddress struct {
@ -78,7 +79,6 @@ var ErrorsNoRetry = []string{
"giving up after 0 fragment retries", "giving up after 0 fragment retries",
"Sorry about that", "Sorry about that",
"This video is not available", "This video is not available",
"Video unavailable",
"requested format not available", "requested format not available",
"interrupted by user", "interrupted by user",
"Sign in to confirm your age", "Sign in to confirm your age",
@ -91,7 +91,6 @@ var ErrorsNoRetry = []string{
"Premiere will begin shortly", "Premiere will begin shortly",
"cannot unmarshal number 0.0", "cannot unmarshal number 0.0",
"default youtube thumbnail found", "default youtube thumbnail found",
"livestream is likely bugged",
} }
var WalletErrors = []string{ var WalletErrors = []string{
"Not enough funds to cover this transaction", "Not enough funds to cover this transaction",
@ -118,8 +117,6 @@ var NeverRetryFailures = []string{
"Playback on other websites has been disabled by the video owner", "Playback on other websites has been disabled by the video owner",
"uploader has not made this video available in your country", "uploader has not made this video available in your country",
"This video has been removed by the uploader", "This video has been removed by the uploader",
"Video unavailable",
"Video is not available - hardcoded fix",
} }
type SyncFlags struct { type SyncFlags struct {
@ -157,7 +154,7 @@ func (f *SyncFlags) VideosToSync(totalSubscribers uint) int {
800: 250, 800: 250,
600: 200, 600: 200,
200: 80, 200: 80,
100: 20, 100: 50,
1: 10, 1: 10,
} }
videosToSync := 0 videosToSync := 0
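Only the 100-subscriber tier changes here (20 videos on one side of the compare, 50 on the other). For orientation, the map keys read as subscriber thresholds and the values as the number of videos to sync; the selection loop below is an assumption, not a copy of the surrounding function:

// videosForSubscribers picks the largest tier whose threshold the channel meets.
// The tier values mirror the ones visible in the hunk; the loop itself is assumed.
func videosForSubscribers(totalSubscribers uint) int {
	tiers := map[uint]int{
		800: 250,
		600: 200,
		200: 80,
		100: 50,
		1:   10,
	}
	videosToSync := 0
	for threshold, count := range tiers {
		if totalSubscribers >= threshold && count > videosToSync {
			videosToSync = count
		}
	}
	return videosToSync
}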
@ -196,10 +193,9 @@ const (
StatusFailed = "failed" StatusFailed = "failed"
StatusFinalized = "finalized" // no more changes allowed StatusFinalized = "finalized" // no more changes allowed
StatusAbandoned = "abandoned" // deleted on youtube or banned StatusAbandoned = "abandoned" // deleted on youtube or banned
StatusAgeRestricted = "agerestricted" // one or more videos are age restricted and should be reprocessed with special keys
) )
var SyncStatuses = []string{StatusPending, StatusPendingEmail, StatusPendingUpgrade, StatusQueued, StatusSyncing, StatusSynced, StatusFailed, StatusFinalized, StatusAbandoned, StatusWipeDb, StatusAgeRestricted} var SyncStatuses = []string{StatusPending, StatusPendingEmail, StatusPendingUpgrade, StatusQueued, StatusSyncing, StatusSynced, StatusFailed, StatusFinalized, StatusAbandoned, StatusWipeDb}
const LatestMetadataVersion = 2 const LatestMetadataVersion = 2
@ -208,14 +204,26 @@ const (
VideoStatusFailed = "failed" VideoStatusFailed = "failed"
VideoStatusUpgradeFailed = "upgradefailed" VideoStatusUpgradeFailed = "upgradefailed"
VideoStatusUnpublished = "unpublished" VideoStatusUnpublished = "unpublished"
VideoStatusTransferFailed = "transferfailed" VideoStatusTranferFailed = "transferfailed"
) )
var VideoSyncStatuses = []string{VideoStatusPublished, VideoStatusFailed, VideoStatusUpgradeFailed, VideoStatusUnpublished, VideoStatusTransferFailed}
const ( const (
TransferStateNotTouched = iota TransferStateNotTouched = iota
TransferStatePending TransferStatePending
TransferStateComplete TransferStateComplete
TransferStateManual TransferStateManual
) )
type AwsConfigs struct {
AwsS3ID string
AwsS3Secret string
AwsS3Region string
AwsS3Bucket string
}
func (a *AwsConfigs) GetS3AWSConfig() *aws.Config {
return &aws.Config{
Credentials: credentials.NewStaticCredentials(a.AwsS3ID, a.AwsS3Secret, ""),
Region: &a.AwsS3Region,
}
}
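The new AwsConfigs type wraps four plain strings and hands back an *aws.Config with static credentials. A short usage sketch feeding it into the AWS SDK's upload manager, assuming this file is the shared package imported elsewhere in the diff; the field values, bucket key and the upload itself are placeholders, only GetS3AWSConfig comes from the code above:

package main

import (
	"log"
	"strings"

	"github.com/aws/aws-sdk-go/aws"
	"github.com/aws/aws-sdk-go/aws/session"
	"github.com/aws/aws-sdk-go/service/s3/s3manager"

	"github.com/lbryio/ytsync/v5/shared"
)

func main() {
	cfg := shared.AwsConfigs{ // placeholder values
		AwsS3ID:     "AKIA...",
		AwsS3Secret: "not-a-real-secret",
		AwsS3Region: "us-east-1",
		AwsS3Bucket: "thumbnails-example",
	}
	sess, err := session.NewSession(cfg.GetS3AWSConfig())
	if err != nil {
		log.Fatal(err)
	}
	uploader := s3manager.NewUploader(sess)
	_, err = uploader.Upload(&s3manager.UploadInput{
		Bucket: aws.String(cfg.AwsS3Bucket),
		Key:    aws.String("example.jpg"),
		Body:   strings.NewReader("placeholder body"),
	})
	if err != nil {
		log.Fatal(err)
	}
}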

View file

@ -16,12 +16,17 @@ import (
"syscall" "syscall"
"time" "time"
"github.com/abadojack/whatlanggo"
"github.com/lbryio/ytsync/v5/downloader" "github.com/lbryio/ytsync/v5/downloader"
"github.com/lbryio/ytsync/v5/downloader/ytdl" "github.com/lbryio/ytsync/v5/downloader/ytdl"
"github.com/lbryio/ytsync/v5/shared"
"github.com/vbauerster/mpb/v7"
"github.com/vbauerster/mpb/v7/decor"
"gopkg.in/vansante/go-ffprobe.v2"
"github.com/lbryio/ytsync/v5/ip_manager" "github.com/lbryio/ytsync/v5/ip_manager"
"github.com/lbryio/ytsync/v5/namer" "github.com/lbryio/ytsync/v5/namer"
"github.com/lbryio/ytsync/v5/sdk" "github.com/lbryio/ytsync/v5/sdk"
"github.com/lbryio/ytsync/v5/shared"
"github.com/lbryio/ytsync/v5/tags_manager" "github.com/lbryio/ytsync/v5/tags_manager"
"github.com/lbryio/ytsync/v5/thumbs" "github.com/lbryio/ytsync/v5/thumbs"
"github.com/lbryio/ytsync/v5/timing" "github.com/lbryio/ytsync/v5/timing"
@ -32,12 +37,9 @@ import (
"github.com/lbryio/lbry.go/v2/extras/stop" "github.com/lbryio/lbry.go/v2/extras/stop"
"github.com/lbryio/lbry.go/v2/extras/util" "github.com/lbryio/lbry.go/v2/extras/util"
"github.com/abadojack/whatlanggo" "github.com/aws/aws-sdk-go/aws"
"github.com/shopspring/decimal" "github.com/shopspring/decimal"
log "github.com/sirupsen/logrus" log "github.com/sirupsen/logrus"
"github.com/vbauerster/mpb/v7"
"github.com/vbauerster/mpb/v7/decor"
"gopkg.in/vansante/go-ffprobe.v2"
) )
type YoutubeVideo struct { type YoutubeVideo struct {
@ -53,6 +55,7 @@ type YoutubeVideo struct {
youtubeInfo *ytdl.YtdlVideo youtubeInfo *ytdl.YtdlVideo
youtubeChannelID string youtubeChannelID string
tags []string tags []string
awsConfig aws.Config
thumbnailURL string thumbnailURL string
lbryChannelID string lbryChannelID string
mocked bool mocked bool
@ -98,7 +101,7 @@ var youtubeCategories = map[string]string{
"44": "trailers", "44": "trailers",
} }
func NewYoutubeVideo(directory string, videoData *ytdl.YtdlVideo, playlistPosition int64, stopGroup *stop.Group, pool *ip_manager.IPPool) (*YoutubeVideo, error) { func NewYoutubeVideo(directory string, videoData *ytdl.YtdlVideo, playlistPosition int64, awsConfig aws.Config, stopGroup *stop.Group, pool *ip_manager.IPPool) (*YoutubeVideo, error) {
// youtube-dl returns times in local timezone sometimes. this could break in the future // youtube-dl returns times in local timezone sometimes. this could break in the future
// maybe we can file a PR to choose the timezone we want from youtube-dl // maybe we can file a PR to choose the timezone we want from youtube-dl
return &YoutubeVideo{ return &YoutubeVideo{
@ -106,21 +109,22 @@ func NewYoutubeVideo(directory string, videoData *ytdl.YtdlVideo, playlistPositi
title: videoData.Title, title: videoData.Title,
description: videoData.Description, description: videoData.Description,
playlistPosition: playlistPosition, playlistPosition: playlistPosition,
publishedAt: videoData.GetUploadTime(), publishedAt: videoData.UploadDateForReal,
dir: directory, dir: directory,
youtubeInfo: videoData, youtubeInfo: videoData,
awsConfig: awsConfig,
mocked: false, mocked: false,
youtubeChannelID: videoData.ChannelID, youtubeChannelID: videoData.ChannelID,
stopGroup: stopGroup, stopGroup: stopGroup,
pool: pool, pool: pool,
}, nil }, nil
} }
func NewMockedVideo(directory string, videoID string, youtubeChannelID string, awsConfig aws.Config, stopGroup *stop.Group, pool *ip_manager.IPPool) *YoutubeVideo {
func NewMockedVideo(directory string, videoID string, youtubeChannelID string, stopGroup *stop.Group, pool *ip_manager.IPPool) *YoutubeVideo {
return &YoutubeVideo{ return &YoutubeVideo{
id: videoID, id: videoID,
playlistPosition: 0, playlistPosition: 0,
dir: directory, dir: directory,
awsConfig: awsConfig,
mocked: true, mocked: true,
youtubeChannelID: youtubeChannelID, youtubeChannelID: youtubeChannelID,
stopGroup: stopGroup, stopGroup: stopGroup,
@ -175,7 +179,7 @@ func (v *YoutubeVideo) getFullPath() string {
} }
func (v *YoutubeVideo) getAbbrevDescription() string { func (v *YoutubeVideo) getAbbrevDescription() string {
maxLength := 6500 maxLength := 2800
description := strings.TrimSpace(v.description) description := strings.TrimSpace(v.description)
additionalDescription := "\nhttps://www.youtube.com/watch?v=" + v.id additionalDescription := "\nhttps://www.youtube.com/watch?v=" + v.id
khanAcademyClaimID := "5fc52291980268b82413ca4c0ace1b8d749f3ffb" khanAcademyClaimID := "5fc52291980268b82413ca4c0ace1b8d749f3ffb"
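The only change visible in this hunk is the description budget (2800 characters on one side, 6500 on the other); the rest of the function is cut off. As a rough illustration of what such a cap implies, the sketch below (using only the standard strings package) trims the description so that it plus the link back to YouTube fits the budget; the repository's exact truncation rules are not shown above, so treat this as an assumption:

// abbreviate caps a description at maxLength while always keeping the
// link back to the original YouTube video. Illustrative only.
func abbreviate(description, videoID string, maxLength int) string {
	description = strings.TrimSpace(description)
	additional := "\nhttps://www.youtube.com/watch?v=" + videoID
	if len(description)+len(additional) <= maxLength {
		return description + additional
	}
	cut := maxLength - len(additional) - len("...")
	if cut < 0 {
		cut = 0
	}
	return description[:cut] + "..." + additional
}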
@ -320,7 +324,7 @@ func (v *YoutubeVideo) download() error {
//speedThrottleRetries := 3 //speedThrottleRetries := 3
for i := 0; i < len(qualities); i++ { for i := 0; i < len(qualities); i++ {
quality := qualities[i] quality := qualities[i]
argsWithFilters := append(ytdlArgs, "-fbestvideo[ext=mp4][vcodec!*=av01][height<="+quality+"]+bestaudio[ext!=webm][format_id!=258][format_id!=380][format_id!=251][format_id!=256][format_id!=327][format_id!=328]") argsWithFilters := append(ytdlArgs, "-fbestvideo[ext=mp4][vcodec!*=av01][height<="+quality+"]+bestaudio[ext!=webm][format_id!=258][format_id!=251][format_id!=256][format_id!=327]")
argsWithFilters = append(argsWithFilters, userAgent...) argsWithFilters = append(argsWithFilters, userAgent...)
//if speedThrottleRetries > 0 { //if speedThrottleRetries > 0 {
// speedThrottleRetries-- // speedThrottleRetries--
@ -514,28 +518,21 @@ func (v *YoutubeVideo) trackProgressBar(argsWithFilters []string, ticker *time.T
bar.Completed() bar.Completed()
bar.Abort(true) bar.Abort(true)
}() }()
origSize := int64(0)
lastUpdate := time.Now()
for { for {
select { select {
case <-done.Ch(): case <-done.Ch():
return return
case <-ticker.C: case <-ticker.C:
var err error
size, err := logUtils.DirSize(v.videoDir()) size, err := logUtils.DirSize(v.videoDir())
if err != nil { if err != nil {
log.Errorf("error while getting size of download directory: %s", errors.FullTrace(err)) log.Errorf("error while getting size of download directory: %s", errors.FullTrace(err))
return return
} }
if size > origSize {
origSize = size
bar.SetCurrent(size) bar.SetCurrent(size)
if size > int64(videoSize+audioSize) { if size > int64(videoSize+audioSize) {
bar.SetTotal(size+2048, false) bar.SetTotal(size+2048, false)
} }
bar.DecoratorEwmaUpdate(time.Since(lastUpdate)) bar.DecoratorEwmaUpdate(400 * time.Millisecond)
lastUpdate = time.Now()
}
} }
} }
}() }()
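One side of this hunk pushes the polled directory size straight into the bar on every 400 ms tick; the other remembers the previous size and the time of the last update, so the bar only moves when the download actually grew and the EWMA decorator sees real elapsed time. A self-contained sketch of the second variant; dirSize stands in for the repository's logUtils.DirSize and the function name is hypothetical:

package sources

import (
	"time"

	"github.com/vbauerster/mpb/v7"
)

// pollDownloadProgress advances an mpb bar from the on-disk size of the
// download directory until done is closed. Assembled from the hunk above.
func pollDownloadProgress(bar *mpb.Bar, done <-chan struct{}, dirSize func() (int64, error), estimated int64) {
	ticker := time.NewTicker(400 * time.Millisecond)
	defer ticker.Stop()

	origSize := int64(0)
	lastUpdate := time.Now()
	for {
		select {
		case <-done:
			return
		case <-ticker.C:
			size, err := dirSize()
			if err != nil {
				return // the real code logs the error before bailing out
			}
			if size > origSize { // only move the bar when the download progressed
				origSize = size
				bar.SetCurrent(size)
				if size > estimated {
					bar.SetTotal(size+2048, false) // keep the total ahead of the running size
				}
				bar.DecoratorEwmaUpdate(time.Since(lastUpdate))
				lastUpdate = time.Now()
			}
		}
	}
}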
@ -691,7 +688,7 @@ func (v *YoutubeVideo) triggerThumbnailSave() (err error) {
if thumbnail.Width == 0 { if thumbnail.Width == 0 {
return errors.Err("default youtube thumbnail found") return errors.Err("default youtube thumbnail found")
} }
v.thumbnailURL, err = thumbs.MirrorThumbnail(thumbnail.URL, v.ID()) v.thumbnailURL, err = thumbs.MirrorThumbnail(thumbnail.URL, v.ID(), v.awsConfig)
return err return err
} }
@ -780,9 +777,6 @@ func (v *YoutubeVideo) Sync(daemon *jsonrpc.Client, params SyncParams, existingV
func (v *YoutubeVideo) downloadAndPublish(daemon *jsonrpc.Client, params SyncParams) (*SyncSummary, error) { func (v *YoutubeVideo) downloadAndPublish(daemon *jsonrpc.Client, params SyncParams) (*SyncSummary, error) {
var err error var err error
if v.youtubeInfo == nil {
return nil, errors.Err("Video is not available - hardcoded fix")
}
dur := time.Duration(v.youtubeInfo.Duration) * time.Second dur := time.Duration(v.youtubeInfo.Duration) * time.Second
minDuration := 7 * time.Second minDuration := 7 * time.Second
@ -790,9 +784,6 @@ func (v *YoutubeVideo) downloadAndPublish(daemon *jsonrpc.Client, params SyncPar
if v.youtubeInfo.IsLive == true { if v.youtubeInfo.IsLive == true {
return nil, errors.Err("video is a live stream and hasn't completed yet") return nil, errors.Err("video is a live stream and hasn't completed yet")
} }
if v.youtubeInfo.Availability != "public" {
return nil, errors.Err("video is not public")
}
if dur > v.maxVideoLength { if dur > v.maxVideoLength {
logUtils.SendErrorToSlack("%s is %s long and the limit is %s", v.id, dur.String(), v.maxVideoLength.String()) logUtils.SendErrorToSlack("%s is %s long and the limit is %s", v.id, dur.String(), v.maxVideoLength.String())
return nil, errors.Err("video is too long to process") return nil, errors.Err("video is too long to process")
@ -801,11 +792,6 @@ func (v *YoutubeVideo) downloadAndPublish(daemon *jsonrpc.Client, params SyncPar
logUtils.SendErrorToSlack("%s is %s long and the minimum is %s", v.id, dur.String(), minDuration.String()) logUtils.SendErrorToSlack("%s is %s long and the minimum is %s", v.id, dur.String(), minDuration.String())
return nil, errors.Err("video is too short to process") return nil, errors.Err("video is too short to process")
} }
buggedLivestream := v.youtubeInfo.LiveStatus == "post_live"
if buggedLivestream && dur >= 2*time.Hour {
return nil, errors.Err("livestream is likely bugged as it was recently published and has a length of %s which is more than 2 hours", dur.String())
}
for { for {
err = v.download() err = v.download()
if err != nil && strings.Contains(err.Error(), "HTTP Error 429") { if err != nil && strings.Contains(err.Error(), "HTTP Error 429") {
@ -875,11 +861,7 @@ func (v *YoutubeVideo) getMetadata() (languages []string, locations []jsonrpc.Lo
} }
func (v *YoutubeVideo) reprocess(daemon *jsonrpc.Client, params SyncParams, existingVideoData *sdk.SyncedVideo) (*SyncSummary, error) { func (v *YoutubeVideo) reprocess(daemon *jsonrpc.Client, params SyncParams, existingVideoData *sdk.SyncedVideo) (*SyncSummary, error) {
c, err := daemon.ClaimSearch(jsonrpc.ClaimSearchArgs{ c, err := daemon.ClaimSearch(nil, &existingVideoData.ClaimID, nil, nil, 1, 20)
ClaimID: &existingVideoData.ClaimID,
Page: 1,
PageSize: 20,
})
if err != nil { if err != nil {
return nil, errors.Err(err) return nil, errors.Err(err)
} }
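The interesting change in this hunk is the lbry.go jsonrpc client: ClaimSearch takes a ClaimSearchArgs struct on one side and positional arguments on the other. A hedged fragment using only the fields visible above; daemon is the *jsonrpc.Client from the surrounding function and the claim ID is a placeholder:

claimID := "0123456789abcdef0123456789abcdef01234567" // placeholder
c, err := daemon.ClaimSearch(jsonrpc.ClaimSearchArgs{
	ClaimID:  &claimID,
	Page:     1,
	PageSize: 20,
})
if err != nil {
	return nil, errors.Err(err)
}
_ = c // the surrounding code then picks the matching claim out of this page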
@ -898,7 +880,7 @@ func (v *YoutubeVideo) reprocess(daemon *jsonrpc.Client, params SyncParams, exis
return nil, errors.Err("could not find thumbnail for mocked video") return nil, errors.Err("could not find thumbnail for mocked video")
} }
thumbnail := thumbs.GetBestThumbnail(v.youtubeInfo.Thumbnails) thumbnail := thumbs.GetBestThumbnail(v.youtubeInfo.Thumbnails)
thumbnailURL, err = thumbs.MirrorThumbnail(thumbnail.URL, v.ID()) thumbnailURL, err = thumbs.MirrorThumbnail(thumbnail.URL, v.ID(), v.awsConfig)
} else { } else {
thumbnailURL = thumbs.ThumbnailEndpoint + v.ID() thumbnailURL = thumbs.ThumbnailEndpoint + v.ID()
} }
@ -948,7 +930,6 @@ func (v *YoutubeVideo) reprocess(daemon *jsonrpc.Client, params SyncParams, exis
Height: util.PtrToUint(720), Height: util.PtrToUint(720),
Width: util.PtrToUint(1280), Width: util.PtrToUint(1280),
Fee: fee, Fee: fee,
ReleaseTime: util.PtrToInt64(v.publishedAt.Unix()),
} }
v.walletLock.RLock() v.walletLock.RLock()

View file

@ -6,7 +6,6 @@ import (
"os" "os"
"strings" "strings"
"github.com/lbryio/ytsync/v5/configs"
"github.com/lbryio/ytsync/v5/downloader/ytdl" "github.com/lbryio/ytsync/v5/downloader/ytdl"
"github.com/lbryio/lbry.go/v2/extras/errors" "github.com/lbryio/lbry.go/v2/extras/errors"
@ -84,11 +83,11 @@ func (u *thumbnailUploader) deleteTmpFile() {
log.Infof("failed to delete local thumbnail file: %s", err.Error()) log.Infof("failed to delete local thumbnail file: %s", err.Error())
} }
} }
func MirrorThumbnail(url string, name string) (string, error) { func MirrorThumbnail(url string, name string, s3Config aws.Config) (string, error) {
tu := thumbnailUploader{ tu := thumbnailUploader{
originalUrl: url, originalUrl: url,
name: name, name: name,
s3Config: *configs.Configuration.AWSThumbnailsS3Config.GetS3AWSConfig(), s3Config: s3Config,
} }
err := tu.downloadThumbnail() err := tu.downloadThumbnail()
if err != nil { if err != nil {
@ -101,12 +100,14 @@ func MirrorThumbnail(url string, name string) (string, error) {
return "", err return "", err
} }
//this is our own S3 storage ownS3Config := s3Config.Copy(&aws.Config{Endpoint: aws.String("s3.lbry.tech")})
tu2 := thumbnailUploader{ tu2 := thumbnailUploader{
originalUrl: url, originalUrl: url,
name: name, name: name,
s3Config: *configs.Configuration.ThumbnailsS3Config.GetS3AWSConfig(), s3Config: *ownS3Config,
} }
//own S3
err = tu2.uploadThumbnail() err = tu2.uploadThumbnail()
if err != nil { if err != nil {
return "", err return "", err
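Whichever side is read as newer, the flow is the same: the thumbnail is mirrored twice, once to the configured thumbnails bucket and once to our own storage. The aws.Config.Copy call is what makes the second upload cheap to configure: it clones credentials and region and overrides only the endpoint. A small sketch of that override; everything except the Copy call and the endpoint string is an assumption:

package thumbs

import "github.com/aws/aws-sdk-go/aws"

// ownEndpointConfig clones an S3 config, keeping its credentials and region,
// and points it at our own storage endpoint, as the hunk above does.
func ownEndpointConfig(base aws.Config) *aws.Config {
	return base.Copy(&aws.Config{Endpoint: aws.String("s3.lbry.tech")})
}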

View file

@ -1,109 +0,0 @@
package util
import (
"archive/tar"
"io"
"io/fs"
"os"
"path/filepath"
"github.com/lbryio/lbry.go/v2/extras/errors"
)
func CreateTarball(tarballFilePath string, filePaths []string) error {
file, err := os.Create(tarballFilePath)
if err != nil {
return errors.Err("Could not create tarball file '%s', got error '%s'", tarballFilePath, err.Error())
}
defer file.Close()
tarWriter := tar.NewWriter(file)
defer tarWriter.Close()
for _, filePath := range filePaths {
err := addFileToTarWriter(filePath, tarWriter)
if err != nil {
return errors.Err("Could not add file '%s', to tarball, got error '%s'", filePath, err.Error())
}
}
return nil
}
func addFileToTarWriter(filePath string, tarWriter *tar.Writer) error {
file, err := os.Open(filePath)
if err != nil {
return errors.Err("Could not open file '%s', got error '%s'", filePath, err.Error())
}
defer file.Close()
stat, err := file.Stat()
if err != nil {
return errors.Err("Could not get stat for file '%s', got error '%s'", filePath, err.Error())
}
header := &tar.Header{
Name: stat.Name(),
Size: stat.Size(),
Mode: int64(stat.Mode()),
ModTime: stat.ModTime(),
}
err = tarWriter.WriteHeader(header)
if err != nil {
return errors.Err("Could not write header for file '%s', got error '%s'", filePath, err.Error())
}
_, err = io.Copy(tarWriter, file)
if err != nil {
return errors.Err("Could not copy the file '%s' data to the tarball, got error '%s'", filePath, err.Error())
}
return nil
}
func Untar(tarball, target string) error {
reader, err := os.Open(tarball)
if err != nil {
return errors.Err(err)
}
defer reader.Close()
tarReader := tar.NewReader(reader)
for {
header, err := tarReader.Next()
if err == io.EOF {
break
} else if err != nil {
return errors.Err(err)
}
path := filepath.Join(target, header.Name)
info := header.FileInfo()
if info.IsDir() {
if err = os.MkdirAll(path, info.Mode()); err != nil {
return errors.Err(err)
}
continue
}
err = extractFile(path, info, tarReader)
if err != nil {
return err
}
}
return nil
}
func extractFile(path string, info fs.FileInfo, tarReader *tar.Reader) error {
file, err := os.OpenFile(path, os.O_CREATE|os.O_TRUNC|os.O_WRONLY, info.Mode())
if err != nil {
return errors.Err(err)
}
defer file.Close()
_, err = io.Copy(file, tarReader)
if err != nil {
return errors.Err(err)
}
return nil
}
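The file above (present on only one side of the compare) is a small self-contained tar helper: CreateTarball writes flat entries (base names only, no directory headers) and Untar restores them into an existing target directory. A minimal round-trip under that assumption; the paths and the import path are placeholders:

package main

import (
	"log"
	"os"

	"github.com/lbryio/ytsync/v5/util" // assumed import path for the package shown above
)

func main() {
	if err := util.CreateTarball("/tmp/wallet_backup.tar", []string{"/tmp/wallet.db", "/tmp/blockchain_headers"}); err != nil {
		log.Fatal(err)
	}
	// Untar writes files directly into the target, so it has to exist first.
	if err := os.MkdirAll("/tmp/restored", 0755); err != nil {
		log.Fatal(err)
	}
	if err := util.Untar("/tmp/wallet_backup.tar", "/tmp/restored"); err != nil {
		log.Fatal(err)
	}
}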

View file

@ -14,9 +14,7 @@ func SendErrorToSlack(format string, a ...interface{}) {
message = fmt.Sprintf(format, a...) message = fmt.Sprintf(format, a...)
} }
log.Errorln(message) log.Errorln(message)
log.SetLevel(log.InfoLevel) //I don't want to change the underlying lib so this will do... err := util.SendToSlack(":sos: " + message)
err := util.SendToSlack(":sos: ```" + message + "```")
log.SetLevel(log.DebugLevel)
if err != nil { if err != nil {
log.Errorln(err) log.Errorln(err)
} }
@ -29,9 +27,7 @@ func SendInfoToSlack(format string, a ...interface{}) {
message = fmt.Sprintf(format, a...) message = fmt.Sprintf(format, a...)
} }
log.Infoln(message) log.Infoln(message)
log.SetLevel(log.InfoLevel) //I don't want to change the underlying lib so this will do...
err := util.SendToSlack(":information_source: " + message) err := util.SendToSlack(":information_source: " + message)
log.SetLevel(log.DebugLevel)
if err != nil { if err != nil {
log.Errorln(err) log.Errorln(err)
} }
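Both helpers keep printf-style semantics: format, log locally, then forward to Slack (one side also fences the message in a code block and temporarily bumps the log level around the Slack call). Callers elsewhere in this diff use them exactly like a logger; the arguments below are placeholders:

logUtils.SendInfoToSlack("%s: %d videos queued for sync", channelID, len(videos))
logUtils.SendErrorToSlack("error while trying to call %s. Waiting to retry: %s", endpoint, err.Error())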

View file

@ -11,7 +11,6 @@ import (
"github.com/lbryio/lbry.go/v2/extras/errors" "github.com/lbryio/lbry.go/v2/extras/errors"
"github.com/lbryio/lbry.go/v2/lbrycrd" "github.com/lbryio/lbry.go/v2/lbrycrd"
"github.com/lbryio/ytsync/v5/configs"
"github.com/lbryio/ytsync/v5/timing" "github.com/lbryio/ytsync/v5/timing"
"github.com/docker/docker/api/types" "github.com/docker/docker/api/types"
@ -186,9 +185,9 @@ func CleanForStartup() error {
return errors.Err(err) return errors.Err(err)
} }
lbrycrd, err := GetLbrycrdClient(configs.Configuration.LbrycrdString) lbrycrd, err := GetLbrycrdClient(os.Getenv("LBRYCRD_STRING"))
if err != nil { if err != nil {
return errors.Prefix("error getting lbrycrd client", err) return errors.Prefix("error getting lbrycrd client: ", err)
} }
height, err := lbrycrd.GetBlockCount() height, err := lbrycrd.GetBlockCount()
if err != nil { if err != nil {

View file

@ -29,6 +29,7 @@ import (
"github.com/lbryio/lbry.go/v2/extras/stop" "github.com/lbryio/lbry.go/v2/extras/stop"
"github.com/lbryio/lbry.go/v2/extras/util" "github.com/lbryio/lbry.go/v2/extras/util"
"github.com/aws/aws-sdk-go/aws"
log "github.com/sirupsen/logrus" log "github.com/sirupsen/logrus"
) )
@ -49,13 +50,14 @@ func (a byPublishedAt) Less(i, j int) bool { return a[i].PublishedAt().Before(a[
type VideoParams struct { type VideoParams struct {
VideoDir string VideoDir string
S3Config aws.Config
Stopper *stop.Group Stopper *stop.Group
IPPool *ip_manager.IPPool IPPool *ip_manager.IPPool
} }
var mostRecentlyFailedChannel string // TODO: fix this hack! var mostRecentlyFailedChannel string // TODO: fix this hack!
func GetVideosToSync(channelID string, syncedVideos map[string]sdk.SyncedVideo, quickSync bool, maxVideos int, videoParams VideoParams, lastUploadedVideo string) ([]Video, error) { func GetVideosToSync(config *sdk.APIConfig, channelID string, syncedVideos map[string]sdk.SyncedVideo, quickSync bool, maxVideos int, videoParams VideoParams, lastUploadedVideo string) ([]Video, error) {
var videos []Video var videos []Video
if quickSync && maxVideos > 50 { if quickSync && maxVideos > 50 {
maxVideos = 50 maxVideos = 50
@ -94,14 +96,14 @@ func GetVideosToSync(channelID string, syncedVideos map[string]sdk.SyncedVideo,
mostRecentlyFailedChannel = channelID mostRecentlyFailedChannel = channelID
} }
vids, err := getVideos(channelID, videoIDs, videoParams.Stopper.Ch(), videoParams.IPPool) vids, err := getVideos(config, channelID, videoIDs, videoParams.Stopper.Ch(), videoParams.IPPool)
if err != nil { if err != nil {
return nil, err return nil, err
} }
for _, item := range vids { for _, item := range vids {
positionInList := playlistMap[item.ID] positionInList := playlistMap[item.ID]
videoToAdd, err := sources.NewYoutubeVideo(videoParams.VideoDir, item, positionInList, videoParams.Stopper, videoParams.IPPool) videoToAdd, err := sources.NewYoutubeVideo(videoParams.VideoDir, item, positionInList, videoParams.S3Config, videoParams.Stopper, videoParams.IPPool)
if err != nil { if err != nil {
return nil, errors.Err(err) return nil, errors.Err(err)
} }
@ -109,12 +111,11 @@ func GetVideosToSync(channelID string, syncedVideos map[string]sdk.SyncedVideo,
} }
for k, v := range syncedVideos { for k, v := range syncedVideos {
newMetadataVersion := int8(2) if !v.Published {
if !v.Published && v.MetadataVersion >= newMetadataVersion {
continue continue
} }
if _, ok := playlistMap[k]; !ok { if _, ok := playlistMap[k]; !ok {
videos = append(videos, sources.NewMockedVideo(videoParams.VideoDir, k, channelID, videoParams.Stopper, videoParams.IPPool)) videos = append(videos, sources.NewMockedVideo(videoParams.VideoDir, k, channelID, videoParams.S3Config, videoParams.Stopper, videoParams.IPPool))
} }
} }
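In the variant that threads the API config through explicitly, GetVideosToSync receives the *sdk.APIConfig as its first argument and VideoParams carries the S3 config used for thumbnail mirroring. A hedged call sketch; the package alias ytapi and every value below are placeholders:

videoParams := ytapi.VideoParams{
	VideoDir: "/mnt/ytsync/videos",
	S3Config: *awsConfigs.GetS3AWSConfig(),
	Stopper:  stopGroup,
	IPPool:   ipPool,
}
videos, err := ytapi.GetVideosToSync(apiConfig, channelID, syncedVideos, false /* quickSync */, maxVideos, videoParams, lastUploadedVideo)
if err != nil {
	return errors.Err(err)
}
_ = videos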
@ -204,8 +205,7 @@ func ChannelInfo(channelID string) (*YoutubeStatsResponse, error) {
return &decodedResponse, nil return &decodedResponse, nil
} }
func getVideos(channelID string, videoIDs []string, stopChan stop.Chan, ipPool *ip_manager.IPPool) ([]*ytdl.YtdlVideo, error) { func getVideos(config *sdk.APIConfig, channelID string, videoIDs []string, stopChan stop.Chan, ipPool *ip_manager.IPPool) ([]*ytdl.YtdlVideo, error) {
config := sdk.GetAPIsConfigs()
var videos []*ytdl.YtdlVideo var videos []*ytdl.YtdlVideo
for _, videoID := range videoIDs { for _, videoID := range videoIDs {
if len(videoID) < 5 { if len(videoID) < 5 {
@ -217,6 +217,11 @@ func getVideos(channelID string, videoIDs []string, stopChan stop.Chan, ipPool *
default: default:
} }
//ip, err := ipPool.GetIP(videoID)
//if err != nil {
// return nil, err
//}
//video, err := downloader.GetVideoInformation(videoID, &net.TCPAddr{IP: net.ParseIP(ip)})
state, err := config.VideoState(videoID) state, err := config.VideoState(videoID)
if err != nil { if err != nil {
return nil, errors.Err(err) return nil, errors.Err(err)
@ -224,7 +229,7 @@ func getVideos(channelID string, videoIDs []string, stopChan stop.Chan, ipPool *
if state == "published" { if state == "published" {
continue continue
} }
video, err := downloader.GetVideoInformation(videoID, stopChan, ipPool) video, err := downloader.GetVideoInformation(config, videoID, stopChan, nil, ipPool)
if err != nil { if err != nil {
errSDK := config.MarkVideoStatus(shared.VideoStatus{ errSDK := config.MarkVideoStatus(shared.VideoStatus{
ChannelID: channelID, ChannelID: channelID,