package sources

import (
	"context"
	"encoding/json"
	"fmt"
	"io/ioutil"
	"os"
	"os/exec"
	"path"
	"path/filepath"
	"regexp"
	"strconv"
	"strings"
	"sync"
	"syscall"
	"time"

	"github.com/lbryio/ytsync/v5/downloader"
	"github.com/lbryio/ytsync/v5/downloader/ytdl"
	"github.com/lbryio/ytsync/v5/ip_manager"
	"github.com/lbryio/ytsync/v5/namer"
	"github.com/lbryio/ytsync/v5/sdk"
	"github.com/lbryio/ytsync/v5/shared"
	"github.com/lbryio/ytsync/v5/tags_manager"
	"github.com/lbryio/ytsync/v5/thumbs"
	"github.com/lbryio/ytsync/v5/timing"
	logUtils "github.com/lbryio/ytsync/v5/util"

	"github.com/lbryio/lbry.go/v2/extras/errors"
	"github.com/lbryio/lbry.go/v2/extras/jsonrpc"
	"github.com/lbryio/lbry.go/v2/extras/stop"
	"github.com/lbryio/lbry.go/v2/extras/util"

	"github.com/abadojack/whatlanggo"
	"github.com/shopspring/decimal"
	log "github.com/sirupsen/logrus"
	"github.com/vbauerster/mpb/v7"
	"github.com/vbauerster/mpb/v7/decor"
	"gopkg.in/vansante/go-ffprobe.v2"
)

type YoutubeVideo struct {
	id               string
	title            string
	description      string
	playlistPosition int64
	size             *int64
	maxVideoSize     int64
	maxVideoLength   time.Duration
	publishedAt      time.Time
	dir              string
	youtubeInfo      *ytdl.YtdlVideo
	youtubeChannelID string
	tags             []string
	thumbnailURL     string
	lbryChannelID    string
	mocked           bool
	walletLock       *sync.RWMutex
	stopGroup        *stop.Group
	pool             *ip_manager.IPPool
	progressBars     *mpb.Progress
	progressBarWg    *sync.WaitGroup
}

var youtubeCategories = map[string]string{
	"1":  "film & animation",
	"2":  "autos & vehicles",
	"10": "music",
	"15": "pets & animals",
	"17": "sports",
	"18": "short movies",
	"19": "travel & events",
	"20": "gaming",
	"21": "videoblogging",
	"22": "people & blogs",
	"23": "comedy",
	"24": "entertainment",
	"25": "news & politics",
	"26": "howto & style",
	"27": "education",
	"28": "science & technology",
	"29": "nonprofits & activism",
	"30": "movies",
	"31": "anime/animation",
	"32": "action/adventure",
	"33": "classics",
	"34": "comedy",
	"35": "documentary",
	"36": "drama",
	"37": "family",
	"38": "foreign",
	"39": "horror",
	"40": "sci-fi/fantasy",
	"41": "thriller",
	"42": "shorts",
	"43": "shows",
	"44": "trailers",
}
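
// NewYoutubeVideo builds a YoutubeVideo from metadata previously fetched with yt-dlp.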
func NewYoutubeVideo(directory string, videoData *ytdl.YtdlVideo, playlistPosition int64, stopGroup *stop.Group, pool *ip_manager.IPPool) (*YoutubeVideo, error) {
	// youtube-dl returns times in local timezone sometimes. this could break in the future
	// maybe we can file a PR to choose the timezone we want from youtube-dl
	return &YoutubeVideo{
		id:               videoData.ID,
		title:            videoData.Title,
		description:      videoData.Description,
		playlistPosition: playlistPosition,
		publishedAt:      videoData.GetUploadTime(),
		dir:              directory,
		youtubeInfo:      videoData,
		mocked:           false,
		youtubeChannelID: videoData.ChannelID,
		stopGroup:        stopGroup,
		pool:             pool,
	}, nil
}
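
// NewMockedVideo builds a minimal, mocked YoutubeVideo for entries that have no yt-dlp metadata attached.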
func NewMockedVideo(directory string, videoID string, youtubeChannelID string, stopGroup *stop.Group, pool *ip_manager.IPPool) *YoutubeVideo {
	return &YoutubeVideo{
		id:               videoID,
		playlistPosition: 0,
		dir:              directory,
		mocked:           true,
		youtubeChannelID: youtubeChannelID,
		stopGroup:        stopGroup,
		pool:             pool,
	}
}

func (v *YoutubeVideo) ID() string {
	return v.id
}

func (v *YoutubeVideo) PlaylistPosition() int {
	return int(v.playlistPosition)
}

func (v *YoutubeVideo) IDAndNum() string {
	return v.ID() + " (" + strconv.Itoa(int(v.playlistPosition)) + " in channel)"
}

func (v *YoutubeVideo) PublishedAt() time.Time {
	if v.mocked {
		return time.Unix(0, 0)
	}
	return v.publishedAt
}
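
// getFullPath derives the local .mp4 path from a slugified, length-limited version of the title, falling back to the video ID.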
func (v *YoutubeVideo) getFullPath() string {
	maxLen := 30
	reg := regexp.MustCompile(`[^a-zA-Z0-9]+`)

	chunks := strings.Split(strings.ToLower(strings.Trim(reg.ReplaceAllString(v.title, "-"), "-")), "-")

	name := chunks[0]
	if len(name) > maxLen {
		name = name[:maxLen]
	}

	for _, chunk := range chunks[1:] {
		tmpName := name + "-" + chunk
		if len(tmpName) > maxLen {
			if len(name) < 20 {
				name = tmpName[:maxLen]
			}
			break
		}
		name = tmpName
	}
	if len(name) < 1 {
		name = v.id
	}
	return v.videoDir() + "/" + name + ".mp4"
}
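
// getAbbrevDescription trims the description to the claim metadata limit and appends a link back to the original YouTube video.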
func (v *YoutubeVideo) getAbbrevDescription() string {
	maxLength := 6500
	description := strings.TrimSpace(v.description)
	additionalDescription := "\nhttps://www.youtube.com/watch?v=" + v.id
	khanAcademyClaimID := "5fc52291980268b82413ca4c0ace1b8d749f3ffb"
	if v.lbryChannelID == khanAcademyClaimID {
		additionalDescription = additionalDescription + "\nNote: All Khan Academy content is available for free at (www.khanacademy.org)"
	}
	if len(description) > maxLength {
		description = description[:maxLength]
	}
	return description + "\n..." + additionalDescription
}
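
// checkCookiesIntegrity restores cookies.txt from cookies-backup.txt if it was cleared out.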
func checkCookiesIntegrity() error {
	fi, err := os.Stat("cookies.txt")
	if err != nil {
		return errors.Err(err)
	}
	if fi.Size() == 0 {
		log.Errorf("cookies were cleared out. Attempting a restore from cookies-backup.txt")
		input, err := ioutil.ReadFile("cookies-backup.txt")
		if err != nil {
			return errors.Err(err)
		}

		err = ioutil.WriteFile("cookies.txt", input, 0644)
		if err != nil {
			return errors.Err(err)
		}
	}
	return nil
}
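
// download runs yt-dlp for the video, walking down the list of allowed qualities and rotating
// source IP / user agent on known failure modes, then records the size of the resulting file.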
func (v *YoutubeVideo) download() error {
	start := time.Now()
	defer func(start time.Time) {
		timing.TimedComponent("download").Add(time.Since(start))
	}(start)

	videoPath := v.getFullPath()

	err := os.Mkdir(v.videoDir(), 0777)
	if err != nil && !strings.Contains(err.Error(), "file exists") {
		return errors.Wrap(err, 0)
	}

	_, err = os.Stat(videoPath)
	if err != nil && !os.IsNotExist(err) {
		return errors.Err(err)
	} else if err == nil {
		log.Debugln(v.id + " already exists at " + videoPath)
		return nil
	}
	qualities := []string{
		"1080",
		"720",
		"480",
		"360",
	}
	dur := time.Duration(v.youtubeInfo.Duration) * time.Second
	if dur.Hours() > 1 { //for videos longer than 1 hour only sync up to 720p
		qualities = []string{
			"720",
			"480",
			"360",
		}
	}

	metadataPath := path.Join(logUtils.GetVideoMetadataDir(), v.id+".info.json")
	_, err = os.Stat(metadataPath)
	if err != nil {
		if os.IsNotExist(err) {
			return errors.Err("metadata information for video %s is missing! Why?", v.id)
		}
		return errors.Err(err)
	}

	metadata, err := parseVideoMetadata(metadataPath)

	err = checkCookiesIntegrity()
	if err != nil {
		return err
	}

	ytdlArgs := []string{
		"--no-progress",
		"-o" + strings.TrimSuffix(v.getFullPath(), ".mp4"),
		"--merge-output-format",
		"mp4",
		"--postprocessor-args",
		"ffmpeg:-movflags faststart",
		"--abort-on-unavailable-fragment",
		"--fragment-retries",
		"1",
		"--cookies",
		"cookies.txt",
		"--extractor-args",
		"youtube:player_client=android",
		//"--concurrent-fragments",
		//"2",
		"--load-info-json",
		metadataPath,
	}

	userAgent := []string{"--user-agent", downloader.ChromeUA}
	if v.maxVideoSize > 0 {
		ytdlArgs = append(ytdlArgs,
			"--max-filesize",
			fmt.Sprintf("%dM", v.maxVideoSize),
		)
	}
	if v.maxVideoLength > 0 {
		ytdlArgs = append(ytdlArgs,
			"--match-filter",
			fmt.Sprintf("duration <= %d", int(v.maxVideoLength.Seconds())),
		)
	}

	var sourceAddress string
	for {
		sourceAddress, err = v.pool.GetIP(v.id)
		if err != nil {
			if errors.Is(err, ip_manager.ErrAllThrottled) {
				select {
				case <-v.stopGroup.Ch():
					return errors.Err("interrupted by user")
				default:
					time.Sleep(ip_manager.IPCooldownPeriod)
					continue
				}
			} else {
				return err
			}
		}
		break
	}
	defer v.pool.ReleaseIP(sourceAddress)

	ytdlArgs = append(ytdlArgs,
		"--source-address",
		sourceAddress,
		fmt.Sprintf("https://www.youtube.com/watch?v=%s", v.id),
	)
	//speedThrottleRetries := 3
	for i := 0; i < len(qualities); i++ {
		quality := qualities[i]
		argsWithFilters := append(ytdlArgs, "-fbestvideo[ext=mp4][vcodec!*=av01][height<="+quality+"]+bestaudio[ext!=webm][format_id!=258][format_id!=380][format_id!=251][format_id!=256][format_id!=327][format_id!=328]")
		argsWithFilters = append(argsWithFilters, userAgent...)
		//if speedThrottleRetries > 0 {
		//	speedThrottleRetries--
		//	argsWithFilters = append(argsWithFilters, "--throttled-rate", "180K")
		//}
		cmd := exec.Command("yt-dlp", argsWithFilters...)
		log.Printf("Running command yt-dlp %s", strings.Join(argsWithFilters, " "))

		stderr, err := cmd.StderrPipe()
		if err != nil {
			return errors.Err(err)
		}
		stdout, err := cmd.StdoutPipe()
		if err != nil {
			return errors.Err(err)
		}

		if err := cmd.Start(); err != nil {
			return errors.Err(err)
		}

		dlStopGrp := stop.New()

		ticker := time.NewTicker(400 * time.Millisecond)
		go v.trackProgressBar(argsWithFilters, ticker, metadata, dlStopGrp, sourceAddress)

		//ticker2 := time.NewTicker(10 * time.Second)
		//v.monitorSlowDownload(ticker, dlStopGrp, sourceAddress, cmd)

		errorLog, _ := ioutil.ReadAll(stderr)
		outLog, _ := ioutil.ReadAll(stdout)
		err = cmd.Wait()

		//stop the progress bar
		ticker.Stop()
		dlStopGrp.Stop()

		if err != nil {
			if strings.Contains(err.Error(), "exit status 1") {
				if strings.Contains(string(errorLog), "HTTP Error 429") || strings.Contains(string(errorLog), "returned non-zero exit status 8") {
					v.pool.SetThrottled(sourceAddress)
				} else if strings.Contains(string(errorLog), "giving up after 0 fragment retries") {
					if i == (len(qualities) - 1) {
						return errors.Err(string(errorLog))
					}
					continue //this bypasses the yt throttling IP redistribution... TODO: don't
				} else if strings.Contains(string(errorLog), "YouTube said: Unable to extract video data") && !strings.Contains(userAgent[1], "Googlebot") {
					i-- //do not lower quality when trying a different user agent
					// keep the "--user-agent" flag so the later append and the userAgent[1] check keep working
					userAgent = []string{"--user-agent", downloader.GoogleBotUA}
					log.Infof("trying different user agent for video %s", v.ID())
					continue
					//} else if strings.Contains(string(errorLog), "yt_dlp.utils.ThrottledDownload") {
					//	log.Infof("throttled download speed for video %s. Retrying", v.ID())
					//	i-- //do not lower quality when we're retrying a throttled download
					//	continue
				}
				return errors.Err(string(errorLog))
			}
			return errors.Err(err)
		}
		log.Debugln(string(outLog))

		if strings.Contains(string(outLog), "does not pass filter duration") {
			_ = v.delete("does not pass filter duration")
			return errors.Err("video is too long to process")
		}
		if strings.Contains(string(outLog), "File is larger than max-filesize") {
			_ = v.delete("File is larger than max-filesize")
			return errors.Err("the video is too big to sync, skipping for now")
		}
		if string(errorLog) != "" {
			if strings.Contains(string(errorLog), "HTTP Error 429") {
				v.pool.SetThrottled(sourceAddress)
			}
			log.Printf("Command finished with error: %v", errors.Err(string(errorLog)))
			_ = v.delete("due to error")
			return errors.Err(string(errorLog))
		}
		fi, err := os.Stat(v.getFullPath())
		if err != nil {
			return errors.Err(err)
		}
		err = os.Chmod(v.getFullPath(), 0777)
		if err != nil {
			return errors.Err(err)
		}
		videoSize := fi.Size()
		v.size = &videoSize
		break
	}
	return nil
}
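
// monitorSlowDownload kills the yt-dlp process when the average download speed stays below
// 200 KB/s for several consecutive checks. Its call site in download is currently commented out.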
func (v *YoutubeVideo) monitorSlowDownload(ticker *time.Ticker, stop *stop.Group, address string, cmd *exec.Cmd) {
	count := 0
	lastSize := int64(0)
	for {
		select {
		case <-stop.Ch():
			return
		case <-ticker.C:
			size, err := logUtils.DirSize(v.videoDir())
			if err != nil {
				log.Errorf("error while getting size of download directory: %s", errors.FullTrace(err))
				continue
			}
			delta := size - lastSize
			lastSize = size // remember the size so the next tick only measures the newly downloaded bytes
			avgSpeed := delta / 10
			if avgSpeed < 200*1024 { //200 KB/s
				count++
			} else {
				count--
			}
			if count > 3 {
				err := cmd.Process.Signal(syscall.SIGKILL)
				if err != nil {
					log.Errorf("failure in killing slow download: %s", errors.Err(err))
					return
				}
			}
		}
	}
}
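
// trackProgressBar estimates the final file size with a simulated yt-dlp run (-s) and then feeds
// an mpb progress bar from the size of the download directory until the download stops.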
func (v *YoutubeVideo) trackProgressBar(argsWithFilters []string, ticker *time.Ticker, metadata *ytMetadata, done *stop.Group, sourceAddress string) {
	v.progressBarWg.Add(1)
	go func() {
		defer v.progressBarWg.Done()
		//get size of the video before downloading
		cmd := exec.Command("yt-dlp", append(argsWithFilters, "-s")...)
		stdout, err := cmd.StdoutPipe()
		if err != nil {
			log.Errorf("error while getting final file size: %s", errors.FullTrace(err))
			return
		}

		if err := cmd.Start(); err != nil {
			log.Errorf("error while getting final file size: %s", errors.FullTrace(err))
			return
		}
		outLog, _ := ioutil.ReadAll(stdout)
		err = cmd.Wait()
		output := string(outLog)
		parts := strings.Split(output, ": ")
		if len(parts) != 3 {
			log.Errorf("couldn't parse audio and video parts from the output (%s)", output)
			return
		}
		formats := strings.Split(parts[2], "+")
		if len(formats) != 2 {
			log.Errorf("couldn't parse formats from the output (%s)", output)
			return
		}
		log.Debugf("'%s'", output)
		videoFormat := formats[0]
		audioFormat := strings.Replace(formats[1], "\n", "", -1)

		videoSize := 0
		audioSize := 0
		if metadata != nil {
			for _, f := range metadata.Formats {
				if f.FormatID == videoFormat {
					videoSize = f.Filesize
				}
				if f.FormatID == audioFormat {
					audioSize = f.Filesize
				}
			}
		}

		log.Debugf("(%s) - videoSize: %d (%s), audiosize: %d (%s)", v.id, videoSize, videoFormat, audioSize, audioFormat)
		bar := v.progressBars.AddBar(int64(videoSize+audioSize),
			mpb.PrependDecorators(
				decor.CountersKibiByte("% .2f / % .2f "),
				// simple name decorator
				decor.Name(fmt.Sprintf("id: %s src-ip: (%s)", v.id, sourceAddress)),
				// decor.DSyncWidth bit enables column width synchronization
				decor.Percentage(decor.WCSyncSpace),
			),
			mpb.AppendDecorators(
				decor.EwmaETA(decor.ET_STYLE_GO, 90),
				decor.Name(" ] "),
				decor.EwmaSpeed(decor.UnitKiB, "% .2f ", 60),
				decor.OnComplete(
					// ETA decorator with ewma age of 60
					decor.EwmaETA(decor.ET_STYLE_GO, 60), "done",
				),
			),
			mpb.BarRemoveOnComplete(),
		)
		defer func() {
			bar.Completed()
			bar.Abort(true)
		}()
		origSize := int64(0)
		lastUpdate := time.Now()
		for {
			select {
			case <-done.Ch():
				return
			case <-ticker.C:
				var err error
				size, err := logUtils.DirSize(v.videoDir())
				if err != nil {
					log.Errorf("error while getting size of download directory: %s", errors.FullTrace(err))
					return
				}
				if size > origSize {
					origSize = size
					bar.SetCurrent(size)
					if size > int64(videoSize+audioSize) {
						bar.SetTotal(size+2048, false)
					}
					bar.DecoratorEwmaUpdate(time.Since(lastUpdate))
					lastUpdate = time.Now()
				}
			}
		}
	}()
}
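
// ytMetadata models the fields of the .info.json metadata file produced by yt-dlp.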
type ytMetadata struct {
	ID      string `json:"id"`
	Title   string `json:"title"`
	Formats []struct {
		Asr               int         `json:"asr"`
		Filesize          int         `json:"filesize"`
		FormatID          string      `json:"format_id"`
		FormatNote        string      `json:"format_note"`
		Fps               interface{} `json:"fps"`
		Height            interface{} `json:"height"`
		Quality           int         `json:"quality"`
		Tbr               float64     `json:"tbr"`
		URL               string      `json:"url"`
		Width             interface{} `json:"width"`
		Ext               string      `json:"ext"`
		Vcodec            string      `json:"vcodec"`
		Acodec            string      `json:"acodec"`
		Abr               float64     `json:"abr,omitempty"`
		DownloaderOptions struct {
			HTTPChunkSize int `json:"http_chunk_size"`
		} `json:"downloader_options,omitempty"`
		Container   string `json:"container,omitempty"`
		Format      string `json:"format"`
		Protocol    string `json:"protocol"`
		HTTPHeaders struct {
			UserAgent      string `json:"User-Agent"`
			AcceptCharset  string `json:"Accept-Charset"`
			Accept         string `json:"Accept"`
			AcceptEncoding string `json:"Accept-Encoding"`
			AcceptLanguage string `json:"Accept-Language"`
		} `json:"http_headers"`
		Vbr float64 `json:"vbr,omitempty"`
	} `json:"formats"`
	Thumbnails []struct {
		Height     int    `json:"height"`
		URL        string `json:"url"`
		Width      int    `json:"width"`
		Resolution string `json:"resolution"`
		ID         string `json:"id"`
	} `json:"thumbnails"`
	Description        string        `json:"description"`
	UploadDate         string        `json:"upload_date"`
	Uploader           string        `json:"uploader"`
	UploaderID         string        `json:"uploader_id"`
	UploaderURL        string        `json:"uploader_url"`
	ChannelID          string        `json:"channel_id"`
	ChannelURL         string        `json:"channel_url"`
	Duration           int           `json:"duration"`
	ViewCount          int           `json:"view_count"`
	AverageRating      float64       `json:"average_rating"`
	AgeLimit           int           `json:"age_limit"`
	WebpageURL         string        `json:"webpage_url"`
	Categories         []string      `json:"categories"`
	Tags               []interface{} `json:"tags"`
	IsLive             interface{}   `json:"is_live"`
	LikeCount          int           `json:"like_count"`
	DislikeCount       int           `json:"dislike_count"`
	Channel            string        `json:"channel"`
	Extractor          string        `json:"extractor"`
	WebpageURLBasename string        `json:"webpage_url_basename"`
	ExtractorKey       string        `json:"extractor_key"`
	Playlist           interface{}   `json:"playlist"`
	PlaylistIndex      interface{}   `json:"playlist_index"`
	Thumbnail          string        `json:"thumbnail"`
	DisplayID          string        `json:"display_id"`
	Format             string        `json:"format"`
	FormatID           string        `json:"format_id"`
	Width              int           `json:"width"`
	Height             int           `json:"height"`
	Resolution         interface{}   `json:"resolution"`
	Fps                int           `json:"fps"`
	Vcodec             string        `json:"vcodec"`
	Vbr                float64       `json:"vbr"`
	StretchedRatio     interface{}   `json:"stretched_ratio"`
	Acodec             string        `json:"acodec"`
	Abr                float64       `json:"abr"`
	Ext                string        `json:"ext"`
	Fulltitle          string        `json:"fulltitle"`
	Filename           string        `json:"_filename"`
}
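
// parseVideoMetadata reads a .info.json file and unmarshals it into a ytMetadata struct.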
func parseVideoMetadata(metadataPath string) (*ytMetadata, error) {
	f, err := os.Open(metadataPath)
	if err != nil {
		return nil, errors.Err(err)
	}
	// defer closing the metadata file until we're done parsing it
	defer f.Close()
	// read the opened metadata file as a byte array
	byteValue, _ := ioutil.ReadAll(f)

	var m ytMetadata

	// unmarshal the byte array into the ytMetadata struct defined above
	err = json.Unmarshal(byteValue, &m)
	if err != nil {
		return nil, errors.Err(err)
	}
	return &m, nil
}

func (v *YoutubeVideo) videoDir() string {
	return path.Join(v.dir, v.id)
}
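
// getDownloadedPath locates the downloaded file inside videoDir regardless of the extension it ended up with.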
func (v *YoutubeVideo) getDownloadedPath() (string, error) {
	files, err := ioutil.ReadDir(v.videoDir())
	log.Infoln(v.videoDir())
	if err != nil {
		err = errors.Prefix("list error", err)
		log.Errorln(err)
		return "", err
	}

	for _, f := range files {
		if f.IsDir() {
			continue
		}
		if strings.Contains(v.getFullPath(), strings.TrimSuffix(f.Name(), filepath.Ext(f.Name()))) {
			return path.Join(v.videoDir(), f.Name()), nil
		}
	}
	return "", errors.Err("could not find any downloaded videos")
}
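
// delete removes the downloaded video file from disk; reason is only used for logging.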
func (v *YoutubeVideo) delete(reason string) error {
	videoPath, err := v.getDownloadedPath()
	if err != nil {
		log.Errorln(err)
		return err
	}
	err = os.Remove(videoPath)
	log.Debugf("%s deleted from disk for '%s' (%s)", v.id, reason, videoPath)

	if err != nil {
		err = errors.Prefix("delete error", err)
		log.Errorln(err)
		return err
	}

	return nil
}
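
// triggerThumbnailSave mirrors the best available YouTube thumbnail and stores the resulting URL on the video.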
func (v *YoutubeVideo) triggerThumbnailSave() (err error) {
	thumbnail := thumbs.GetBestThumbnail(v.youtubeInfo.Thumbnails)
	if thumbnail.Width == 0 {
		return errors.Err("default youtube thumbnail found")
	}
	v.thumbnailURL, err = thumbs.MirrorThumbnail(thumbnail.URL, v.ID())
	return err
}
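
// publish assembles the claim metadata (description, detected language, tags, optional fee) and publishes the downloaded file.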
func (v *YoutubeVideo) publish(daemon *jsonrpc.Client, params SyncParams) (*SyncSummary, error) {
	start := time.Now()
	defer func(start time.Time) {
		timing.TimedComponent("publish").Add(time.Since(start))
	}(start)
	languages, locations, tags := v.getMetadata()
	var fee *jsonrpc.Fee
	if params.Fee != nil {
		feeAmount, err := decimal.NewFromString(params.Fee.Amount)
		if err != nil {
			return nil, errors.Err(err)
		}
		fee = &jsonrpc.Fee{
			FeeAddress:  &params.Fee.Address,
			FeeAmount:   feeAmount,
			FeeCurrency: jsonrpc.Currency(params.Fee.Currency),
		}
	}
	urlsRegex := regexp.MustCompile(`(?m) ?(f|ht)(tp)(s?)(://)(.*)[.|/](.*)`)
	descriptionSample := urlsRegex.ReplaceAllString(v.description, "")
	info := whatlanggo.Detect(descriptionSample)
	info2 := whatlanggo.Detect(v.title)
	if info.IsReliable() && info.Lang.Iso6391() != "" {
		language := info.Lang.Iso6391()
		languages = []string{language}
	} else if info2.IsReliable() && info2.Lang.Iso6391() != "" {
		language := info2.Lang.Iso6391()
		languages = []string{language}
	}
	options := jsonrpc.StreamCreateOptions{
		ClaimCreateOptions: jsonrpc.ClaimCreateOptions{
			Title:        &v.title,
			Description:  util.PtrToString(v.getAbbrevDescription()),
			ClaimAddress: &params.ClaimAddress,
			Languages:    languages,
			ThumbnailURL: &v.thumbnailURL,
			Tags:         tags,
			Locations:    locations,
			FundingAccountIDs: []string{
				params.DefaultAccount,
			},
		},
		Fee:         fee,
		License:     util.PtrToString("Copyrighted (contact publisher)"),
		ReleaseTime: util.PtrToInt64(v.publishedAt.Unix()),
		ChannelID:   &v.lbryChannelID,
	}
	downloadPath, err := v.getDownloadedPath()
	if err != nil {
		return nil, err
	}
	return publishAndRetryExistingNames(daemon, v.title, downloadPath, params.Amount, options, params.Namer, v.walletLock)
}

func (v *YoutubeVideo) Size() *int64 {
	return v.size
}

type SyncParams struct {
	ClaimAddress   string
	Amount         float64
	ChannelID      string
	MaxVideoSize   int
	Namer          *namer.Namer
	MaxVideoLength time.Duration
	Fee            *shared.Fee
	DefaultAccount string
}
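
// Sync publishes the video to LBRY: already published videos are reprocessed (claim update), everything else is downloaded and published.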
func (v *YoutubeVideo) Sync(daemon *jsonrpc.Client, params SyncParams, existingVideoData *sdk.SyncedVideo, reprocess bool, walletLock *sync.RWMutex, pbWg *sync.WaitGroup, pb *mpb.Progress) (*SyncSummary, error) {
	v.maxVideoSize = int64(params.MaxVideoSize)
	v.maxVideoLength = params.MaxVideoLength
	v.lbryChannelID = params.ChannelID
	v.walletLock = walletLock
	v.progressBars = pb
	v.progressBarWg = pbWg
	if reprocess && existingVideoData != nil && existingVideoData.Published {
		summary, err := v.reprocess(daemon, params, existingVideoData)
		return summary, errors.Prefix("upgrade failed", err)
	}
	return v.downloadAndPublish(daemon, params)
}
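
// downloadAndPublish validates the video (live status, availability, duration), downloads it,
// mirrors the thumbnail, publishes it, and finally removes the local file.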
func (v *YoutubeVideo) downloadAndPublish(daemon *jsonrpc.Client, params SyncParams) (*SyncSummary, error) {
	var err error

	dur := time.Duration(v.youtubeInfo.Duration) * time.Second
	minDuration := 7 * time.Second

	if v.youtubeInfo.IsLive == true {
		return nil, errors.Err("video is a live stream and hasn't completed yet")
	}
	if v.youtubeInfo.Availability != "public" {
		return nil, errors.Err("video is not public")
	}
	if dur > v.maxVideoLength {
		logUtils.SendErrorToSlack("%s is %s long and the limit is %s", v.id, dur.String(), v.maxVideoLength.String())
		return nil, errors.Err("video is too long to process")
	}
	if dur < minDuration {
		logUtils.SendErrorToSlack("%s is %s long and the minimum is %s", v.id, dur.String(), minDuration.String())
		return nil, errors.Err("video is too short to process")
	}

	buggedLivestream := v.youtubeInfo.LiveStatus == "post_live"
	if buggedLivestream && dur >= 2*time.Hour {
		return nil, errors.Err("livestream is likely bugged as it was recently published and has a length of %s which is more than 2 hours", dur.String())
	}
	for {
		err = v.download()
		if err != nil && strings.Contains(err.Error(), "HTTP Error 429") {
			continue
		} else if err != nil {
			return nil, errors.Prefix("download error", err)
		}
		break
	}

	ctx, cancelFn := context.WithTimeout(context.Background(), 5*time.Second)
	defer cancelFn()

	data, err := ffprobe.ProbeURL(ctx, v.getFullPath())
	if err != nil {
		log.Errorf("failure in probing downloaded video: %s", err.Error())
	} else {
		if data.Format.Duration() < minDuration {
			return nil, errors.Err("video is too short to process")
		}
	}
	err = v.triggerThumbnailSave()
	if err != nil {
		return nil, errors.Prefix("thumbnail error", err)
	}
	log.Debugln("Created thumbnail for " + v.id)

	summary, err := v.publish(daemon, params)
	//delete the video in all cases (and ignore the error)
	_ = v.delete("finished download and publish")

	return summary, errors.Prefix("publish error", err)
}
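
// getMetadata returns the claim languages, locations, and sanitized tags for the video.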
func (v *YoutubeVideo) getMetadata() (languages []string, locations []jsonrpc.Location, tags []string) {
	languages = nil
	locations = nil
	tags = nil
	if !v.mocked {
		/*
			if v.youtubeInfo.Snippet.DefaultLanguage != "" {
				if v.youtubeInfo.Snippet.DefaultLanguage == "iw" {
					v.youtubeInfo.Snippet.DefaultLanguage = "he"
				}
				languages = []string{v.youtubeInfo.Snippet.DefaultLanguage}
			}*/

		/*if v.youtubeInfo.!= nil && v.youtubeInfo.RecordingDetails.Location != nil {
			locations = []jsonrpc.Location{{
				Latitude:  util.PtrToString(fmt.Sprintf("%.7f", v.youtubeInfo.RecordingDetails.Location.Latitude)),
				Longitude: util.PtrToString(fmt.Sprintf("%.7f", v.youtubeInfo.RecordingDetails.Location.Longitude)),
			}}
		}*/
		tags = v.youtubeInfo.Tags
	}
	tags, err := tags_manager.SanitizeTags(tags, v.youtubeChannelID)
	if err != nil {
		log.Errorln(err.Error())
	}
	if !v.mocked {
		for _, category := range v.youtubeInfo.Categories {
			tags = append(tags, youtubeCategories[category])
		}
	}

	return languages, locations, tags
}
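
// reprocess updates an existing claim with fresh metadata; if the stream size cannot be determined
// it abandons the claim and republishes the video instead.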
func (v *YoutubeVideo) reprocess(daemon *jsonrpc.Client, params SyncParams, existingVideoData *sdk.SyncedVideo) (*SyncSummary, error) {
	c, err := daemon.ClaimSearch(jsonrpc.ClaimSearchArgs{
		ClaimID:  &existingVideoData.ClaimID,
		Page:     1,
		PageSize: 20,
	})
	if err != nil {
		return nil, errors.Err(err)
	}
	if len(c.Claims) == 0 {
		return nil, errors.Err("cannot reprocess: no claim found for this video")
	} else if len(c.Claims) > 1 {
		return nil, errors.Err("cannot reprocess: too many claims. claimID: %s", existingVideoData.ClaimID)
	}

	currentClaim := c.Claims[0]
	languages, locations, tags := v.getMetadata()

	thumbnailURL := ""
	if currentClaim.Value.GetThumbnail() == nil {
		if v.mocked {
			return nil, errors.Err("could not find thumbnail for mocked video")
		}
		thumbnail := thumbs.GetBestThumbnail(v.youtubeInfo.Thumbnails)
		thumbnailURL, err = thumbs.MirrorThumbnail(thumbnail.URL, v.ID())
	} else {
		thumbnailURL = thumbs.ThumbnailEndpoint + v.ID()
	}

	videoSize, err := currentClaim.GetStreamSizeByMagic()
	if err != nil {
		if existingVideoData.Size > 0 {
			videoSize = uint64(existingVideoData.Size)
		} else {
			log.Infof("%s: the video must be republished as we can't get the right size", v.ID())
			if !v.mocked {
				_, err = daemon.StreamAbandon(currentClaim.Txid, currentClaim.Nout, nil, true)
				if err != nil {
					return nil, errors.Err(err)
				}
				return v.downloadAndPublish(daemon, params)
			}
			return nil, errors.Prefix("the video must be republished as we can't get the right size and it doesn't exist on youtube anymore", err)
		}
	}
	v.size = util.PtrToInt64(int64(videoSize))
	var fee *jsonrpc.Fee
	if params.Fee != nil {
		feeAmount, err := decimal.NewFromString(params.Fee.Amount)
		if err != nil {
			return nil, errors.Err(err)
		}
		fee = &jsonrpc.Fee{
			FeeAddress:  &params.Fee.Address,
			FeeAmount:   feeAmount,
			FeeCurrency: jsonrpc.Currency(params.Fee.Currency),
		}
	}
	streamCreateOptions := &jsonrpc.StreamCreateOptions{
		ClaimCreateOptions: jsonrpc.ClaimCreateOptions{
			Tags:         tags,
			ThumbnailURL: &thumbnailURL,
			Languages:    languages,
			Locations:    locations,
			FundingAccountIDs: []string{
				params.DefaultAccount,
			},
		},
		Author:      util.PtrToString(""),
		License:     util.PtrToString("Copyrighted (contact publisher)"),
		ChannelID:   &v.lbryChannelID,
		Height:      util.PtrToUint(720),
		Width:       util.PtrToUint(1280),
		Fee:         fee,
		ReleaseTime: util.PtrToInt64(v.publishedAt.Unix()),
	}

	v.walletLock.RLock()
	defer v.walletLock.RUnlock()
	if v.mocked {
		start := time.Now()
		pr, err := daemon.StreamUpdate(existingVideoData.ClaimID, jsonrpc.StreamUpdateOptions{
			StreamCreateOptions: streamCreateOptions,
			FileSize:            &videoSize,
		})
		timing.TimedComponent("StreamUpdate").Add(time.Since(start))
		if err != nil {
			return nil, err
		}

		return &SyncSummary{
			ClaimID:   pr.Outputs[0].ClaimID,
			ClaimName: pr.Outputs[0].Name,
		}, nil
	}

	streamCreateOptions.ClaimCreateOptions.Title = &v.title
	streamCreateOptions.ClaimCreateOptions.Description = util.PtrToString(v.getAbbrevDescription())
	streamCreateOptions.Duration = util.PtrToUint64(uint64(v.youtubeInfo.Duration))
	streamCreateOptions.ReleaseTime = util.PtrToInt64(v.publishedAt.Unix())
	start := time.Now()
	pr, err := daemon.StreamUpdate(existingVideoData.ClaimID, jsonrpc.StreamUpdateOptions{
		ClearLanguages:      util.PtrToBool(true),
		ClearLocations:      util.PtrToBool(true),
		ClearTags:           util.PtrToBool(true),
		StreamCreateOptions: streamCreateOptions,
		FileSize:            &videoSize,
	})
	timing.TimedComponent("StreamUpdate").Add(time.Since(start))
	if err != nil {
		return nil, err
	}

	return &SyncSummary{
		ClaimID:   pr.Outputs[0].ClaimID,
		ClaimName: pr.Outputs[0].Name,
	}, nil
}