merge redis backend branch into testing
commit
8cf41a4043
@ -1,54 +0,0 @@
|
|||||||
"""
|
|
||||||
Functionality:
|
|
||||||
- process tasks from API
|
|
||||||
- validate
|
|
||||||
- handover to celery
|
|
||||||
"""
|
|
||||||
|
|
||||||
from home.src.ta.ta_redis import RedisArchivist
|
|
||||||
from home.tasks import download_pending, update_subscribed
|
|
||||||
|
|
||||||
|
|
||||||
class TaskHandler:
    """Handle tasks requested through the API.

    Maps a task name from the request payload to an internal handler,
    runs it, and returns the handler's response dict.
    """

    def __init__(self, data):
        # data: request payload, expected to carry the task name under "run"
        self.data = data

    def run_task(self):
        """Map data and run the requested task.

        Returns the handler's response dict with a "task" key added.
        Raises ValueError for an unknown task name.
        """
        task_name = self.data["run"]
        try:
            to_run = self.exec_map(task_name)
        except KeyError as err:
            print(f"invalid task name {task_name}")
            # fix: include the offending name in the exception instead of
            # raising a bare ValueError that only the console log explains
            raise ValueError(f"invalid task name {task_name}") from err

        response = to_run()
        response.update({"task": task_name})
        return response

    def exec_map(self, task_name):
        """Map dict key and return the bound function to execute.

        Raises KeyError for unknown names; callers translate that
        into ValueError (see run_task).
        """
        exec_map = {
            "download_pending": self._download_pending,
            "rescan_pending": self._rescan_pending,
        }

        return exec_map[task_name]

    @staticmethod
    def _rescan_pending():
        """Look for new items in subscribed channels (async via celery)."""
        print("rescan subscribed channels")
        update_subscribed.delay()
        return {"success": True}

    @staticmethod
    def _download_pending():
        """Start the download queue and remember the celery task id."""
        print("download pending")
        running = download_pending.delay()
        print("set task id: " + running.id)
        # store the task id so the queue can be tracked/stopped later
        RedisArchivist().set_message("dl_queue_id", running.id)
        return {"success": True}
|
|
@ -1,144 +1,134 @@
|
|||||||
"""all api urls"""
|
"""all api urls"""
|
||||||
|
|
||||||
from api.views import (
|
from api import views
|
||||||
ChannelApiListView,
|
|
||||||
ChannelApiVideoView,
|
|
||||||
ChannelApiView,
|
|
||||||
CookieView,
|
|
||||||
DownloadApiListView,
|
|
||||||
DownloadApiView,
|
|
||||||
LoginApiView,
|
|
||||||
PingView,
|
|
||||||
PlaylistApiListView,
|
|
||||||
PlaylistApiVideoView,
|
|
||||||
PlaylistApiView,
|
|
||||||
RefreshView,
|
|
||||||
SearchView,
|
|
||||||
SnapshotApiListView,
|
|
||||||
SnapshotApiView,
|
|
||||||
TaskApiView,
|
|
||||||
TokenView,
|
|
||||||
VideoApiListView,
|
|
||||||
VideoApiView,
|
|
||||||
VideoCommentView,
|
|
||||||
VideoProgressView,
|
|
||||||
VideoSimilarView,
|
|
||||||
VideoSponsorView,
|
|
||||||
WatchedView,
|
|
||||||
)
|
|
||||||
from django.urls import path
|
from django.urls import path
|
||||||
|
|
||||||
urlpatterns = [
|
urlpatterns = [
|
||||||
path("ping/", PingView.as_view(), name="ping"),
|
path("ping/", views.PingView.as_view(), name="ping"),
|
||||||
path("login/", LoginApiView.as_view(), name="api-login"),
|
path("login/", views.LoginApiView.as_view(), name="api-login"),
|
||||||
path(
|
path(
|
||||||
"video/",
|
"video/",
|
||||||
VideoApiListView.as_view(),
|
views.VideoApiListView.as_view(),
|
||||||
name="api-video-list",
|
name="api-video-list",
|
||||||
),
|
),
|
||||||
path(
|
path(
|
||||||
"video/<slug:video_id>/",
|
"video/<slug:video_id>/",
|
||||||
VideoApiView.as_view(),
|
views.VideoApiView.as_view(),
|
||||||
name="api-video",
|
name="api-video",
|
||||||
),
|
),
|
||||||
path(
|
path(
|
||||||
"video/<slug:video_id>/progress/",
|
"video/<slug:video_id>/progress/",
|
||||||
VideoProgressView.as_view(),
|
views.VideoProgressView.as_view(),
|
||||||
name="api-video-progress",
|
name="api-video-progress",
|
||||||
),
|
),
|
||||||
path(
|
path(
|
||||||
"video/<slug:video_id>/comment/",
|
"video/<slug:video_id>/comment/",
|
||||||
VideoCommentView.as_view(),
|
views.VideoCommentView.as_view(),
|
||||||
name="api-video-comment",
|
name="api-video-comment",
|
||||||
),
|
),
|
||||||
path(
|
path(
|
||||||
"video/<slug:video_id>/similar/",
|
"video/<slug:video_id>/similar/",
|
||||||
VideoSimilarView.as_view(),
|
views.VideoSimilarView.as_view(),
|
||||||
name="api-video-similar",
|
name="api-video-similar",
|
||||||
),
|
),
|
||||||
path(
|
path(
|
||||||
"video/<slug:video_id>/sponsor/",
|
"video/<slug:video_id>/sponsor/",
|
||||||
VideoSponsorView.as_view(),
|
views.VideoSponsorView.as_view(),
|
||||||
name="api-video-sponsor",
|
name="api-video-sponsor",
|
||||||
),
|
),
|
||||||
path(
|
path(
|
||||||
"channel/",
|
"channel/",
|
||||||
ChannelApiListView.as_view(),
|
views.ChannelApiListView.as_view(),
|
||||||
name="api-channel-list",
|
name="api-channel-list",
|
||||||
),
|
),
|
||||||
path(
|
path(
|
||||||
"channel/<slug:channel_id>/",
|
"channel/<slug:channel_id>/",
|
||||||
ChannelApiView.as_view(),
|
views.ChannelApiView.as_view(),
|
||||||
name="api-channel",
|
name="api-channel",
|
||||||
),
|
),
|
||||||
path(
|
path(
|
||||||
"channel/<slug:channel_id>/video/",
|
"channel/<slug:channel_id>/video/",
|
||||||
ChannelApiVideoView.as_view(),
|
views.ChannelApiVideoView.as_view(),
|
||||||
name="api-channel-video",
|
name="api-channel-video",
|
||||||
),
|
),
|
||||||
path(
|
path(
|
||||||
"playlist/",
|
"playlist/",
|
||||||
PlaylistApiListView.as_view(),
|
views.PlaylistApiListView.as_view(),
|
||||||
name="api-playlist-list",
|
name="api-playlist-list",
|
||||||
),
|
),
|
||||||
path(
|
path(
|
||||||
"playlist/<slug:playlist_id>/",
|
"playlist/<slug:playlist_id>/",
|
||||||
PlaylistApiView.as_view(),
|
views.PlaylistApiView.as_view(),
|
||||||
name="api-playlist",
|
name="api-playlist",
|
||||||
),
|
),
|
||||||
path(
|
path(
|
||||||
"playlist/<slug:playlist_id>/video/",
|
"playlist/<slug:playlist_id>/video/",
|
||||||
PlaylistApiVideoView.as_view(),
|
views.PlaylistApiVideoView.as_view(),
|
||||||
name="api-playlist-video",
|
name="api-playlist-video",
|
||||||
),
|
),
|
||||||
path(
|
path(
|
||||||
"download/",
|
"download/",
|
||||||
DownloadApiListView.as_view(),
|
views.DownloadApiListView.as_view(),
|
||||||
name="api-download-list",
|
name="api-download-list",
|
||||||
),
|
),
|
||||||
path(
|
path(
|
||||||
"download/<slug:video_id>/",
|
"download/<slug:video_id>/",
|
||||||
DownloadApiView.as_view(),
|
views.DownloadApiView.as_view(),
|
||||||
name="api-download",
|
name="api-download",
|
||||||
),
|
),
|
||||||
path(
|
path(
|
||||||
"refresh/",
|
"refresh/",
|
||||||
RefreshView.as_view(),
|
views.RefreshView.as_view(),
|
||||||
name="api-refresh",
|
name="api-refresh",
|
||||||
),
|
),
|
||||||
path(
|
|
||||||
"task/",
|
|
||||||
TaskApiView.as_view(),
|
|
||||||
name="api-task",
|
|
||||||
),
|
|
||||||
path(
|
path(
|
||||||
"snapshot/",
|
"snapshot/",
|
||||||
SnapshotApiListView.as_view(),
|
views.SnapshotApiListView.as_view(),
|
||||||
name="api-snapshot-list",
|
name="api-snapshot-list",
|
||||||
),
|
),
|
||||||
path(
|
path(
|
||||||
"snapshot/<slug:snapshot_id>/",
|
"snapshot/<slug:snapshot_id>/",
|
||||||
SnapshotApiView.as_view(),
|
views.SnapshotApiView.as_view(),
|
||||||
name="api-snapshot",
|
name="api-snapshot",
|
||||||
),
|
),
|
||||||
|
path(
|
||||||
|
"task-name/",
|
||||||
|
views.TaskListView.as_view(),
|
||||||
|
name="api-task-list",
|
||||||
|
),
|
||||||
|
path(
|
||||||
|
"task-name/<slug:task_name>/",
|
||||||
|
views.TaskNameListView.as_view(),
|
||||||
|
name="api-task-name-list",
|
||||||
|
),
|
||||||
|
path(
|
||||||
|
"task-id/<slug:task_id>/",
|
||||||
|
views.TaskIDView.as_view(),
|
||||||
|
name="api-task-id",
|
||||||
|
),
|
||||||
path(
|
path(
|
||||||
"cookie/",
|
"cookie/",
|
||||||
CookieView.as_view(),
|
views.CookieView.as_view(),
|
||||||
name="api-cookie",
|
name="api-cookie",
|
||||||
),
|
),
|
||||||
path(
|
path(
|
||||||
"watched/",
|
"watched/",
|
||||||
WatchedView.as_view(),
|
views.WatchedView.as_view(),
|
||||||
name="api-watched",
|
name="api-watched",
|
||||||
),
|
),
|
||||||
path(
|
path(
|
||||||
"search/",
|
"search/",
|
||||||
SearchView.as_view(),
|
views.SearchView.as_view(),
|
||||||
name="api-search",
|
name="api-search",
|
||||||
),
|
),
|
||||||
path(
|
path(
|
||||||
"token/",
|
"token/",
|
||||||
TokenView.as_view(),
|
views.TokenView.as_view(),
|
||||||
name="api-token",
|
name="api-token",
|
||||||
),
|
),
|
||||||
|
path(
|
||||||
|
"notification/",
|
||||||
|
views.NotificationView.as_view(),
|
||||||
|
name="api-notification",
|
||||||
|
),
|
||||||
]
|
]
|
||||||
|
@ -0,0 +1,466 @@
|
|||||||
|
"""
|
||||||
|
Functionality:
|
||||||
|
- Handle manual import task
|
||||||
|
- Scan and identify media files in import folder
|
||||||
|
- Process import media files
|
||||||
|
"""
|
||||||
|
|
||||||
|
import json
|
||||||
|
import os
|
||||||
|
import re
|
||||||
|
import shutil
|
||||||
|
import subprocess
|
||||||
|
|
||||||
|
from home.src.download.thumbnails import ThumbManager
|
||||||
|
from home.src.index.video import YoutubeVideo
|
||||||
|
from home.src.ta.config import AppConfig
|
||||||
|
from home.src.ta.helper import ignore_filelist
|
||||||
|
from PIL import Image
|
||||||
|
from yt_dlp.utils import ISO639Utils
|
||||||
|
|
||||||
|
|
||||||
|
class ImportFolderScanner:
    """import and indexing existing video files

    - identify all media files belonging to a video
    - identify youtube id
    - convert if needed
    """

    CONFIG = AppConfig().config
    CACHE_DIR = CONFIG["application"]["cache_dir"]
    IMPORT_DIR = os.path.join(CACHE_DIR, "import")

    # known file extensions mapped to their role in an import bundle
    EXT_MAP = {
        "media": [".mp4", ".mkv", ".webm"],
        "metadata": [".json"],
        "thumb": [".jpg", ".png", ".webp"],
        "subtitle": [".vtt"],
    }

    def __init__(self, task=False):
        # task: optional bound celery task used for progress notifications
        self.task = task
        # list of matched video bundles, populated by match_files()
        self.to_import = False

    def scan(self):
        """scan and match media files, return list of matched bundles"""
        if self.task:
            self.task.send_progress(["Scanning your import folder."])

        all_files = self.get_all_files()
        self.match_files(all_files)
        self.process_videos()

        return self.to_import

    def get_all_files(self):
        """get all files in /import, sorted so siblings are adjacent"""
        rel_paths = ignore_filelist(os.listdir(self.IMPORT_DIR))
        all_files = [os.path.join(self.IMPORT_DIR, i) for i in rel_paths]
        all_files.sort()

        return all_files

    @staticmethod
    def _get_template():
        """base dict describing a single video import bundle"""
        return {
            "media": False,
            "video_id": False,
            "metadata": False,
            "thumb": False,
            "subtitle": [],
        }

    def match_files(self, all_files):
        """loop through all files, join what matches on shared base name"""
        self.to_import = []

        current_video = self._get_template()
        last_base = False

        for file_path in all_files:
            base_name, ext = self._detect_base_name(file_path)
            key, file_path = self._detect_type(file_path, ext)
            if not key or not file_path:
                # unknown extension, not part of any bundle
                continue

            if base_name != last_base:
                if last_base:
                    # base name changed: previous bundle is complete
                    print(f"manual import: {current_video}")
                    self.to_import.append(current_video)

                current_video = self._get_template()
                last_base = base_name

            if key == "subtitle":
                current_video["subtitle"].append(file_path)
            else:
                current_video[key] = file_path

        if current_video.get("media"):
            # flush the trailing bundle
            print(f"manual import: {current_video}")
            self.to_import.append(current_video)

    def _detect_base_name(self, file_path):
        """extract base_name and ext for matching"""
        base_name_raw, ext = os.path.splitext(file_path)
        base_name, ext2 = os.path.splitext(base_name_raw)

        if ext2:
            if ISO639Utils.short2long(ext2.strip(".")) or ext2 == ".info":
                # valid secondary extension (language code or .info.json)
                return base_name, ext

        return base_name_raw, ext

    def _detect_type(self, file_path, ext):
        """detect metadata type for file from its extension"""

        for key, value in self.EXT_MAP.items():
            if ext in value:
                return key, file_path

        return False, False

    def process_videos(self):
        """loop through all matched bundles and import each one"""
        for idx, current_video in enumerate(self.to_import):
            if not current_video["media"]:
                print(f"{current_video}: no matching media file found.")
                raise ValueError

            if self.task:
                self._notify(idx, current_video)

            self._detect_youtube_id(current_video)
            self._dump_thumb(current_video)
            self._convert_thumb(current_video)
            self._get_subtitles(current_video)
            self._convert_video(current_video)
            print(f"manual import: {current_video}")

            ManualImport(current_video, self.CONFIG).run()

    def _notify(self, idx, current_video):
        """send progress notification back to task"""
        filename = os.path.split(current_video["media"])[-1]
        if len(filename) > 50:
            filename = filename[:50] + "..."

        message = [
            f"Import queue processing video {idx + 1}/{len(self.to_import)}",
            filename,
        ]
        progress = (idx + 1) / len(self.to_import)
        self.task.send_progress(message, progress=progress)

    def _detect_youtube_id(self, current_video):
        """find video id from filename or json, raise if neither works"""
        youtube_id = self._extract_id_from_filename(current_video["media"])
        if youtube_id:
            current_video["video_id"] = youtube_id
            return

        youtube_id = self._extract_id_from_json(current_video["metadata"])
        if youtube_id:
            current_video["video_id"] = youtube_id
            return

        raise ValueError("failed to find video id")

    @staticmethod
    def _extract_id_from_filename(file_name):
        """
        look at the file name for the youtube id
        expects filename ending in [<youtube_id>].<ext>
        """
        base_name, _ = os.path.splitext(file_name)
        id_search = re.search(r"\[([a-zA-Z0-9_-]{11})\]$", base_name)
        if id_search:
            youtube_id = id_search.group(1)
            return youtube_id

        print(f"id extraction failed from filename: {file_name}")

        return False

    def _extract_id_from_json(self, json_file):
        """open json file and extract id"""
        json_path = os.path.join(self.CACHE_DIR, "import", json_file)
        with open(json_path, "r", encoding="utf-8") as f:
            json_content = f.read()

        youtube_id = json.loads(json_content)["id"]

        return youtube_id

    def _dump_thumb(self, current_video):
        """extract embedded thumb before converting"""
        if current_video["thumb"]:
            return

        media_path = current_video["media"]
        _, ext = os.path.splitext(media_path)

        new_path = False
        if ext == ".mkv":
            idx, thumb_type = self._get_mkv_thumb_stream(media_path)
            if idx is not None:
                new_path = self.dump_mpv_thumb(media_path, idx, thumb_type)

        elif ext == ".mp4":
            thumb_type = self.get_mp4_thumb_type(media_path)
            if thumb_type:
                new_path = self.dump_mp4_thumb(media_path, thumb_type)

        if new_path:
            current_video["thumb"] = new_path

    def _get_mkv_thumb_stream(self, media_path):
        """get stream idx of thumbnail for mkv files"""
        streams = self._get_streams(media_path)
        attachments = [
            i for i in streams["streams"] if i["codec_type"] == "attachment"
        ]

        for idx, stream in enumerate(attachments):
            tags = stream["tags"]
            if "mimetype" in tags and tags["filename"].startswith("cover"):
                _, ext = os.path.splitext(tags["filename"])
                return idx, ext

        return None, None

    @staticmethod
    def dump_mpv_thumb(media_path, idx, thumb_type):
        """write cover to disk for mkv"""
        # fix: build the path with splitext; the previous rstrip(media_ext)
        # strips a character *set*, not a suffix, and could mangle names
        # ending in any of those characters
        base_path, _ = os.path.splitext(media_path)
        # thumb_type already carries its leading dot (from splitext)
        new_path = f"{base_path}{thumb_type}"
        subprocess.run(
            [
                "ffmpeg",
                "-v",
                "quiet",
                f"-dump_attachment:t:{idx}",
                new_path,
                "-i",
                media_path,
            ],
            check=False,
        )

        return new_path

    def get_mp4_thumb_type(self, media_path):
        """detect filetype of embedded thumbnail"""
        streams = self._get_streams(media_path)

        for stream in streams["streams"]:
            if stream["codec_name"] in ["png", "jpg"]:
                return stream["codec_name"]

        return False

    def _convert_thumb(self, current_video):
        """convert all thumbnails to jpg"""
        if not current_video["thumb"]:
            return

        thumb_path = current_video["thumb"]

        base_path, ext = os.path.splitext(thumb_path)
        if ext == ".jpg":
            return

        new_path = f"{base_path}.jpg"
        img_raw = Image.open(thumb_path)
        img_raw.convert("RGB").save(new_path)

        os.remove(thumb_path)
        current_video["thumb"] = new_path

    def _get_subtitles(self, current_video):
        """find all subtitles in media file"""
        if current_video["subtitle"]:
            return

        media_path = current_video["media"]
        streams = self._get_streams(media_path)
        base_path, ext = os.path.splitext(media_path)

        if ext == ".webm":
            print(f"{media_path}: subtitle extract from webm not supported")
            return

        for idx, stream in enumerate(streams["streams"]):
            if stream["codec_type"] == "subtitle":
                lang = ISO639Utils.long2short(stream["tags"]["language"])
                sub_path = f"{base_path}.{lang}.vtt"
                self._dump_subtitle(idx, media_path, sub_path)
                current_video["subtitle"].append(sub_path)

    @staticmethod
    def _dump_subtitle(idx, media_path, sub_path):
        """extract subtitle from media file"""
        subprocess.run(
            ["ffmpeg", "-i", media_path, "-map", f"0:{idx}", sub_path],
            check=True,
        )

    @staticmethod
    def _get_streams(media_path):
        """return all streams from media_path via ffprobe"""
        streams_raw = subprocess.run(
            [
                "ffprobe",
                "-v",
                "error",
                "-show_streams",
                "-print_format",
                "json",
                media_path,
            ],
            capture_output=True,
            check=True,
        )
        streams = json.loads(streams_raw.stdout.decode())

        return streams

    @staticmethod
    def dump_mp4_thumb(media_path, thumb_type):
        """save cover to disk"""
        # fix: splitext instead of rstrip, same reasoning as dump_mpv_thumb
        base_path, _ = os.path.splitext(media_path)
        # thumb_type here is a codec name ("png"/"jpg"), so add the dot
        new_path = f"{base_path}.{thumb_type}"

        subprocess.run(
            [
                "ffmpeg",
                "-i",
                media_path,
                "-map",
                "0:v",
                "-map",
                "-0:V",
                "-c",
                "copy",
                new_path,
            ],
            check=True,
        )

        return new_path

    def _convert_video(self, current_video):
        """convert media file to mp4 if needed"""
        current_path = current_video["media"]
        base_path, ext = os.path.splitext(current_path)
        if ext == ".mp4":
            return

        new_path = base_path + ".mp4"
        subprocess.run(
            [
                "ffmpeg",
                "-i",
                current_path,
                new_path,
                "-loglevel",
                "warning",
                "-stats",
            ],
            check=True,
        )
        current_video["media"] = new_path
        os.remove(current_path)
|
||||||
|
|
||||||
|
|
||||||
|
class ManualImport:
    """Import one identified video bundle into the archive."""

    def __init__(self, current_video, config):
        # current_video: matched file bundle built by ImportFolderScanner
        # config: application config dict
        self.current_video = current_video
        self.config = config

    def run(self):
        """Index metadata, move the files into the archive, clean up."""
        json_data = self.index_metadata()
        self._move_to_archive(json_data)
        self._cleanup(json_data)

    def index_metadata(self):
        """Build video metadata from yt or local json and upload to ES."""
        video_id = self.current_video["video_id"]
        video = YoutubeVideo(video_id)
        video.build_json(
            youtube_meta_overwrite=self._get_info_json(),
            media_path=self.current_video["media"],
        )
        if not video.json_data:
            print(f"{video_id}: manual import failed, and no metadata found.")
            raise ValueError

        video.check_subtitles(subtitle_files=self.current_video["subtitle"])
        video.upload_to_es()

        if video.offline_import and self.current_video["thumb"]:
            # offline import: reuse the thumbnail file shipped with the media
            source_thumb = self.current_video["thumb"]
            thumb_handler = ThumbManager(video_id)
            target_thumb = thumb_handler.vid_thumb_path(
                absolute=True, create_folder=True
            )
            shutil.move(source_thumb, target_thumb, copy_function=shutil.copyfile)
        else:
            # online metadata available: fetch the thumbnail from remote
            url = video.json_data["vid_thumb_url"]
            ThumbManager(video_id).download_video_thumb(url)

        return video.json_data

    def _get_info_json(self):
        """Read info_json from file, False when the bundle has none."""
        metadata_file = self.current_video["metadata"]
        if not metadata_file:
            return False

        with open(metadata_file, "r", encoding="utf-8") as handle:
            return json.loads(handle.read())

    def _move_to_archive(self, json_data):
        """Move identified media and subtitle files into the archive."""
        videos = self.config["application"]["videos"]

        channel, file_name = os.path.split(json_data["media_url"])
        channel_folder = os.path.join(videos, channel)
        if not os.path.exists(channel_folder):
            os.makedirs(channel_folder)

        media_source = self.current_video["media"]
        media_target = os.path.join(channel_folder, file_name)
        shutil.move(media_source, media_target, copy_function=shutil.copyfile)

        # subtitles move next to the media file, keyed by language code
        base_name, _ = os.path.splitext(media_target)
        for sub_source in self.current_video["subtitle"]:
            lang = sub_source.split(".")[-2]
            sub_target = f"{base_name}.{lang}.vtt"
            shutil.move(sub_source, sub_target, copy_function=shutil.copyfile)

    def _cleanup(self, json_data):
        """Remove leftover metadata, thumbnail and subtitle files."""
        meta_data = self.current_video["metadata"]
        if meta_data and os.path.exists(meta_data):
            os.remove(meta_data)

        thumb = self.current_video["thumb"]
        if thumb and os.path.exists(thumb):
            os.remove(thumb)

        for subtitle_file in self.current_video["subtitle"]:
            if os.path.exists(subtitle_file):
                os.remove(subtitle_file)

        # channel info json written alongside the import, if any
        channel_info = os.path.join(
            self.config["application"]["cache_dir"],
            "import",
            f"{json_data['channel']['channel_id']}.info.json",
        )
        if os.path.exists(channel_info):
            os.remove(channel_info)
|
@ -0,0 +1,110 @@
|
|||||||
|
"""
|
||||||
|
functionality:
|
||||||
|
- interact with in redis stored task results
|
||||||
|
- handle threads and locks
|
||||||
|
"""
|
||||||
|
|
||||||
|
from home import tasks as ta_tasks
|
||||||
|
from home.src.ta.ta_redis import TaskRedis
|
||||||
|
|
||||||
|
|
||||||
|
class TaskManager:
    """Manage task results stored in redis."""

    def get_all_results(self):
        """Return all task results, False when none are stored."""
        handler = TaskRedis()
        all_keys = handler.get_all()
        if not all_keys:
            return False

        return [handler.get_single(i) for i in all_keys]

    def get_tasks_by_name(self, task_name):
        """Get all task results matching task_name, False when none."""
        all_results = self.get_all_results()
        if not all_results:
            return False

        return [i for i in all_results if i.get("name") == task_name]

    def get_task(self, task_id):
        """Get a single task result by id."""
        return TaskRedis().get_single(task_id)

    def is_pending(self, task):
        """Check if task_name is pending, pass task object."""
        tasks = self.get_tasks_by_name(task.name)
        if not tasks:
            return False

        return bool([i for i in tasks if i.get("status") == "PENDING"])

    def is_stopped(self, task_id):
        """Check if task_id has received a STOP command."""
        task = self.get_task(task_id)

        return task.get("command") == "STOP"

    def get_pending(self, task_name):
        """Get all pending tasks of task_name, False when none match."""
        tasks = self.get_tasks_by_name(task_name)
        if not tasks:
            return False

        return [i for i in tasks if i.get("status") == "PENDING"]

    def init(self, task):
        """Pass task object from bind task to set initial pending message."""
        message = {
            "status": "PENDING",
            "result": None,
            "traceback": None,
            "date_done": False,
            "name": task.name,
            "task_id": task.request.id,
        }
        TaskRedis().set_key(task.request.id, message)

    def fail_pending(self):
        """
        mark all pending as failed,
        run at startup to recover from hard reset
        """
        all_results = self.get_all_results()
        # fix: get_all_results() returns False when nothing is stored;
        # iterating False raised TypeError on a clean startup
        if not all_results:
            return

        for result in all_results:
            if result.get("status") == "PENDING":
                result["status"] = "FAILED"
                TaskRedis().set_key(result["task_id"], result, expire=True)
|
||||||
|
|
||||||
|
|
||||||
|
class TaskCommand:
    """Run control commands (start/stop/kill) on tasks."""

    def start(self, task_name):
        """Start task by task_name, only pass tasks that don't take args.

        Returns a dict describing the dispatched celery task.
        """
        task = ta_tasks.app.tasks.get(task_name).delay()

        return {
            "task_id": task.id,
            "status": task.status,
            "task_name": task.name,
        }

    def stop(self, task_id):
        """
        send stop signal to task_id,
        needs to be implemented in task to take effect
        """
        handler = TaskRedis()

        task = handler.get_single(task_id)
        # only tasks registered in the task config accept a stop signal
        if task["name"] not in ta_tasks.BaseTask.TASK_CONFIG:
            raise ValueError

        handler.set_command(task_id, "STOP")

    def kill(self, task_id):
        """Send kill signal to task_id, terminating the worker process."""
        ta_tasks.app.control.revoke(task_id, terminate=True)
|
Loading…
Reference in New Issue