fix update not picking up fresh accounts

fix huge videos clogging up the queue
This commit is contained in:
Niko Storni 2018-10-11 17:21:05 -04:00
parent ea06ed54a6
commit a7b2bc816d
No known key found for this signature in database
GPG key ID: F37FE63398800368
4 changed files with 8 additions and 2 deletions

View file

@@ -57,7 +57,7 @@ func main() {
cmd.Flags().StringVar(&syncStatus, "status", "", "Specify which queue to pull from. Overrides --update") cmd.Flags().StringVar(&syncStatus, "status", "", "Specify which queue to pull from. Overrides --update")
cmd.Flags().StringVar(&channelID, "channelID", "", "If specified, only this channel will be synced.") cmd.Flags().StringVar(&channelID, "channelID", "", "If specified, only this channel will be synced.")
cmd.Flags().Int64Var(&syncFrom, "after", time.Unix(0, 0).Unix(), "Specify from when to pull jobs [Unix time](Default: 0)") cmd.Flags().Int64Var(&syncFrom, "after", time.Unix(0, 0).Unix(), "Specify from when to pull jobs [Unix time](Default: 0)")
cmd.Flags().Int64Var(&syncUntil, "before", time.Now().Unix(), "Specify until when to pull jobs [Unix time](Default: current Unix time)") cmd.Flags().Int64Var(&syncUntil, "before", time.Now().AddDate(1, 0, 0).Unix(), "Specify until when to pull jobs [Unix time](Default: current Unix time)")
cmd.Flags().IntVar(&concurrentJobs, "concurrent-jobs", 1, "how many jobs to process concurrently") cmd.Flags().IntVar(&concurrentJobs, "concurrent-jobs", 1, "how many jobs to process concurrently")
cmd.Flags().IntVar(&videosLimit, "videos-limit", 1000, "how many videos to process per channel") cmd.Flags().IntVar(&videosLimit, "videos-limit", 1000, "how many videos to process per channel")
cmd.Flags().IntVar(&maxVideoSize, "max-size", 2048, "Maximum video size to process (in MB)") cmd.Flags().IntVar(&maxVideoSize, "max-size", 2048, "Maximum video size to process (in MB)")

View file

@@ -14,7 +14,7 @@ import (
) )
const ( const (
MaxReasonLength = 500 MaxReasonLength = 490
) )
type APIConfig struct { type APIConfig struct {

View file

@@ -142,6 +142,10 @@ func (v *YoutubeVideo) download() error {
return errors.Err("no compatible format available for this video") return errors.Err("no compatible format available for this video")
} }
maxRetryAttempts := 5 maxRetryAttempts := 5
if videoInfo.Duration.Hours() > 2 {
return errors.Err("video is too long to process")
}
for i := 0; i < len(formats) && i < maxRetryAttempts; i++ { for i := 0; i < len(formats) && i < maxRetryAttempts; i++ {
formatIndex := i formatIndex := i
if i == maxRetryAttempts-1 { if i == maxRetryAttempts-1 {

View file

@@ -564,6 +564,7 @@ func (s *Sync) startWorker(workerNum int) {
"Unable to extract signature tokens", "Unable to extract signature tokens",
"Client.Timeout exceeded while awaiting headers)", "Client.Timeout exceeded while awaiting headers)",
"the video is too big to sync, skipping for now", "the video is too big to sync, skipping for now",
"video is too long to process",
} }
if util.SubstringInSlice(err.Error(), errorsNoRetry) { if util.SubstringInSlice(err.Error(), errorsNoRetry) {
log.Println("This error should not be retried at all") log.Println("This error should not be retried at all")
@@ -760,6 +761,7 @@ func (s *Sync) processVideo(v video) (err error) {
"Error extracting sts from embedded url response", "Error extracting sts from embedded url response",
"Unable to extract signature tokens", "Unable to extract signature tokens",
"the video is too big to sync, skipping for now", "the video is too big to sync, skipping for now",
"video is too long to process",
} }
if ok && !sv.Published && util.SubstringInSlice(sv.FailureReason, neverRetryFailures) { if ok && !sv.Published && util.SubstringInSlice(sv.FailureReason, neverRetryFailures) {
log.Println(v.ID() + " can't ever be published") log.Println(v.ID() + " can't ever be published")