forked from LBRYCommunity/lbry-sdk
ogg -> ogv, ignore files that aren't video
This commit is contained in:
parent 75a1cc0d33
commit 19ce0ab246

5 changed files with 20 additions and 16 deletions
@@ -3142,7 +3142,10 @@ class Daemon(metaclass=JSONRPCServerType):
                 f"Use --allow-duplicate-name flag to override."
             )
 
-        file_path = await self._video_file_analyzer.verify_or_repair(validate_file, optimize_file, file_path)
+        try:
+            file_path = await self._video_file_analyzer.verify_or_repair(validate_file, optimize_file, file_path)
+        except ValueError:
+            pass  # it's not a video file
 
         claim = Claim()
         claim.stream.update(file_path=file_path, sd_hash='0' * 96, **kwargs)
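For context, a minimal sketch of the skip-on-non-video behaviour this hunk introduces, using a hypothetical FakeAnalyzer (not the real VideoFileAnalyzer) with the same verify_or_repair signature: a ValueError now means "not a video, publish the file as-is" instead of aborting.

import asyncio

class FakeAnalyzer:
    # hypothetical stand-in for VideoFileAnalyzer, for illustration only
    async def verify_or_repair(self, validate, optimize, file_path):
        if not file_path.endswith((".mp4", ".webm", ".ogv")):
            raise ValueError(f"Assuming image file at: {file_path}")
        return file_path

async def prepare_upload(analyzer, file_path):
    # mirrors the daemon change: non-video files no longer abort the publish
    try:
        file_path = await analyzer.verify_or_repair(True, False, file_path)
    except ValueError:
        pass  # it's not a video file; keep the original path
    return file_path

print(asyncio.run(prepare_upload(FakeAnalyzer(), "cover.png")))   # cover.png
print(asyncio.run(prepare_upload(FakeAnalyzer(), "movie.mp4")))   # movie.mp4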
@@ -257,12 +257,12 @@ class VideoFileAnalyzer:
                 continue
             codec = stream["codec_name"].split(",")
             if "theora" in codec:
-                return "ogg"
+                return "ogv"
             if {"vp8", "vp9", "av1"}.intersection(codec):
                 return "webm"
 
         if "theora" in video_encoder:
-            return "ogg"
+            return "ogv"
         elif re.search(r"vp[89x]|av1", video_encoder.split(" ", 1)[0]):
             return "webm"
         return "mp4"
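A standalone sketch of the selection rules in this hunk, fed with made-up ffprobe-style stream dictionaries rather than real scan data; Theora now maps to the .ogv extension instead of .ogg. This is an illustration of the rule set, not the analyzer's actual method.

import re

def best_container_extension(streams, video_encoder=""):
    # pick a container from the detected codecs, falling back to the encoder name
    for stream in streams:
        if stream.get("codec_type") != "video":
            continue
        codec = stream["codec_name"].split(",")
        if "theora" in codec:
            return "ogv"
        if {"vp8", "vp9", "av1"}.intersection(codec):
            return "webm"
    if "theora" in video_encoder:
        return "ogv"
    if re.search(r"vp[89x]|av1", video_encoder.split(" ", 1)[0]):
        return "webm"
    return "mp4"

print(best_container_extension([{"codec_type": "video", "codec_name": "theora"}]))  # ogv
print(best_container_extension([], "libtheora"))                                    # ogv
print(best_container_extension([{"codec_type": "video", "codec_name": "vp9"}]))     # webm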
@@ -274,16 +274,15 @@ class VideoFileAnalyzer:
             scan_data = json.loads(result)
         except Exception as e:
             log.debug("Failure in JSON parsing ffprobe results. Message: %s", str(e))
-            if validate:
-                raise Exception(f'Invalid video file: {file_path}')
-            log.info("Unable to optimize %s . FFmpeg output was unreadable.", file_path)
-            return
+            raise ValueError(f'Absent or unreadable video file: {file_path}')
 
-        if "format" not in scan_data:
-            if validate:
-                raise FileNotFoundError(f'Unexpected or absent video file contents at: {file_path}')
-            log.info("Unable to optimize %s . FFmpeg output is missing the format section.", file_path)
-            return
+        if "format" not in scan_data or "duration" not in scan_data["format"]:
+            log.debug("Format data is missing from ffprobe results for: %s", file_path)
+            raise ValueError(f'Media file does not appear to contain video content at: {file_path}')
+
+        if float(scan_data["format"]["duration"]) < 0.1:
+            log.debug("Media file appears to be an image: %s", file_path)
+            raise ValueError(f'Assuming image file at: {file_path}')
 
         return scan_data
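A self-contained sketch of the revised validation: unreadable ffprobe JSON, a missing format/duration section, and a near-zero duration (an image masquerading as video) all raise ValueError, which the daemon hunk above now treats as "not a video". The function name and sample inputs here are assumptions for illustration, not the analyzer's real method.

import json

def check_scan_data(result, file_path):
    # sketch of the revised validation rules
    try:
        scan_data = json.loads(result)
    except Exception:
        raise ValueError(f"Absent or unreadable video file: {file_path}")
    if "format" not in scan_data or "duration" not in scan_data["format"]:
        raise ValueError(f"Media file does not appear to contain video content at: {file_path}")
    if float(scan_data["format"]["duration"]) < 0.1:
        raise ValueError(f"Assuming image file at: {file_path}")
    return scan_data

check_scan_data('{"format": {"duration": "12.5"}}', "clip.mp4")        # passes
try:
    check_scan_data('{"format": {"duration": "0.04"}}', "photo.jpg")   # effectively a single frame
except ValueError as e:
    print(e)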
@@ -148,6 +148,7 @@ types_map = {
     '.mobi': ('application/x-mobipocket-ebook', 'document'),
     '.oga': ('audio/ogg', 'audio'),
     '.ogv': ('video/ogg', 'video'),
+    '.ogg': ('video/ogg', 'video'),
     '.pct': ('image/pict', 'image'),
     '.pic': ('image/pict', 'image'),
     '.pict': ('image/pict', 'image'),
@@ -162,6 +163,7 @@ types_map = {
 
 
 def guess_media_type(path):
+    # should we be using "file --mime-type -b $filename" on linux?
     _, ext = os.path.splitext(path)
     extension = ext.strip().lower()
     if extension[1:]:
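With the new '.ogg' entry, a lookup sketch behaves as below. This uses a trimmed local copy of the relevant entries and a simplified lookup; the real guess_media_type has additional fallbacks for unknown extensions.

import os

# trimmed local copy of the relevant mappings, for illustration only
types_map = {
    '.oga': ('audio/ogg', 'audio'),
    '.ogv': ('video/ogg', 'video'),
    '.ogg': ('video/ogg', 'video'),
}

def guess_media_type(path):
    # simplified lookup; assumes an octet-stream fallback for unknown extensions
    _, ext = os.path.splitext(path)
    extension = ext.strip().lower()
    return types_map.get(extension, ('application/octet-stream', 'binary'))

print(guess_media_type("talk.oga"))    # ('audio/ogg', 'audio')
print(guess_media_type("movie.ogg"))   # ('video/ogg', 'video')  <- new mapping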
@@ -26,8 +26,8 @@ async def process_video(analyzer, video_file):
     try:
         await analyzer.verify_or_repair(True, False, video_file)
         print("No concerns. Ship it!")
-    except FileNotFoundError as e:
-        print(str(e))
+    except (FileNotFoundError, ValueError) as e:
+        print("Analysis failed.", str(e))
     except Exception as e:
         print(str(e))
         transcode = input("Would you like to make a repaired clone now? [y/N] ")
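A minimal sketch of why ValueError is now caught alongside FileNotFoundError here: the analyzer signals "not a video" with ValueError, so the script can report it without offering a repair. StubAnalyzer is a hypothetical stand-in, not the real analyzer.

import asyncio

class StubAnalyzer:
    # hypothetical stand-in; the real analyzer raises ValueError for non-video input
    async def verify_or_repair(self, validate, optimize, video_file):
        raise ValueError(f"Media file does not appear to contain video content at: {video_file}")

async def process_video(analyzer, video_file):
    try:
        await analyzer.verify_or_repair(True, False, video_file)
        print("No concerns. Ship it!")
    except (FileNotFoundError, ValueError) as e:
        print("Analysis failed.", str(e))

asyncio.run(process_video(StubAnalyzer(), "notes.txt"))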
@@ -130,7 +130,7 @@ class TranscodeValidation(ClaimTestCase):
 
         scan_data = await self.analyzer._get_scan_data(True, self.video_file_ogg)
         extension = self.analyzer._get_best_container_extension(scan_data, "")
-        self.assertEqual(extension, "ogg")
+        self.assertEqual(extension, "ogv")
 
         scan_data = await self.analyzer._get_scan_data(True, self.video_file_webm)
         extension = self.analyzer._get_best_container_extension(scan_data, "")
@@ -143,7 +143,7 @@ class TranscodeValidation(ClaimTestCase):
         self.assertEqual("webm", extension)
 
         extension = self.analyzer._get_best_container_extension("", "libtheora")
-        self.assertEqual("ogg", extension)
+        self.assertEqual("ogv", extension)
 
     async def test_no_ffmpeg(self):
         self.conf.ffmpeg_folder = "I don't really exist/"
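The test changes above only cover the renamed extension. A hypothetical extra test for the new "reject non-video input" path could follow the assertion pattern below; it is not part of this commit, and it uses a stub analyzer and invented names so the snippet stays self-contained and runnable.

import unittest

class StubAnalyzer:
    # hypothetical stand-in for VideoFileAnalyzer, illustration only
    async def verify_or_repair(self, validate, optimize, file_path):
        raise ValueError(f"Assuming image file at: {file_path}")

class ImageRejectionSketch(unittest.IsolatedAsyncioTestCase):
    # hypothetical test, not part of this commit
    async def test_image_file_is_rejected(self):
        analyzer = StubAnalyzer()
        with self.assertRaises(ValueError):
            await analyzer.verify_or_repair(True, False, "snapshot.png")

if __name__ == "__main__":
    unittest.main()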