Feature beat model (#713)
* add django-celery-beat * implement schedule migration * fix version_check migration * remove old schedule init * better schedule migration * fix task_config migration * show task config on settings page * fix notify url builder * refactor celery initiation * fix get task * fix scheduler mig * fix linter * better task_config store on periodic task * save new schedules * fix task_config extraction from custom model * implement auto schedule * implement schedule delete * refactor notifications to ES config storage * downgrade redis * better notification migration to ES * add notification url handling * fix worker start * fix docs spelling * don't resend form data on notification refresh * fix type hints * move TASK_CONFIG to separate module * fix partial task config imports * fix yt_obs typing * delete schedule * remove outdated instructions * create initial schedules * fix reindex days config key * fix doc string * unregister BeatModels
parent
011073617d
commit
9366b8eab9
@ -1,5 +1,7 @@
|
||||
""" handle celery startup """
|
||||
"""start celery app"""
|
||||
|
||||
from .tasks import app as celery_app
|
||||
from __future__ import absolute_import, unicode_literals
|
||||
|
||||
from home.celery import app as celery_app
|
||||
|
||||
__all__ = ("celery_app",)
|
||||
|
@ -0,0 +1,24 @@
|
||||
"""initiate celery"""

import os

from celery import Celery
from home.src.ta.config import AppConfig
from home.src.ta.settings import EnvironmentSettings

# NOTE(review): CONFIG is not referenced below — presumably loaded for
# import-time side effects; confirm before removing
CONFIG = AppConfig().config
REDIS_HOST = EnvironmentSettings.REDIS_HOST
REDIS_PORT = EnvironmentSettings.REDIS_PORT

# the settings module must be set before Celery reads django.conf below
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "config.settings")
app = Celery(
    "tasks",
    broker=f"redis://{REDIS_HOST}:{REDIS_PORT}",
    backend=f"redis://{REDIS_HOST}:{REDIS_PORT}",
    # store extended task metadata (name, args) in the result backend
    result_extended=True,
)
app.config_from_object(
    "django.conf:settings", namespace=EnvironmentSettings.REDIS_NAME_SPACE
)
app.autodiscover_tasks()
app.conf.timezone = EnvironmentSettings.TZ
|
@ -0,0 +1,23 @@
|
||||
# Generated by Django 4.2.7 on 2023-12-05 13:47
|
||||
|
||||
from django.db import migrations, models
|
||||
import django.db.models.deletion
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
    """Create CustomPeriodicTask: multi-table inheritance child of
    django_celery_beat.PeriodicTask with an extra task_config JSON
    field defaulting to an empty dict."""

    dependencies = [
        # parent model lives in django_celery_beat
        ('django_celery_beat', '0018_improve_crontab_helptext'),
        ('home', '0001_initial'),
    ]

    operations = [
        migrations.CreateModel(
            name='CustomPeriodicTask',
            fields=[
                # parent link implementing multi-table inheritance
                ('periodictask_ptr', models.OneToOneField(auto_created=True, on_delete=django.db.models.deletion.CASCADE, parent_link=True, primary_key=True, serialize=False, to='django_celery_beat.periodictask')),
                # free-form per-task configuration store
                ('task_config', models.JSONField(default=dict)),
            ],
            bases=('django_celery_beat.periodictask',),
        ),
    ]
|
@ -0,0 +1,95 @@
|
||||
"""
|
||||
Functionality:
|
||||
- handle schedule forms
|
||||
- implement form validation
|
||||
"""
|
||||
|
||||
from celery.schedules import crontab
|
||||
from django import forms
|
||||
|
||||
|
||||
class CrontabValidator:
    """Validate a three-field cron expression for the scheduler forms."""

    @staticmethod
    def validate_fields(cron_fields):
        """Reject anything that is not exactly minute/hour/day_of_week."""
        if len(cron_fields) != 3:
            raise forms.ValidationError("expected three cron schedule fields")

    @staticmethod
    def validate_minute(minute_field):
        """The minute field must be a plain integer between 0 and 59."""
        try:
            parsed_minute = int(minute_field)
        except ValueError as err:
            raise forms.ValidationError(
                "Invalid value for minutes. Must be an integer."
            ) from err

        if not 0 <= parsed_minute <= 59:
            raise forms.ValidationError(
                "Invalid value for minutes. Must be between 0 and 59."
            )

    @staticmethod
    def validate_cron_tab(minute, hour, day_of_week):
        """Let celery attempt to build the crontab; surface its error."""
        try:
            crontab(minute=minute, hour=hour, day_of_week=day_of_week)
        except ValueError as err:
            raise forms.ValidationError(f"invalid crontab: {err}") from err

    def validate(self, cron_expression):
        """Run all checks; the literal "auto" is always accepted."""
        if cron_expression == "auto":
            return

        fields = cron_expression.split()
        self.validate_fields(fields)

        minute, hour, day_of_week = fields
        self.validate_minute(minute)
        self.validate_cron_tab(minute, hour, day_of_week)
|
||||
|
||||
|
||||
def validate_cron(cron_expression):
    """Form-field validator callable wrapping CrontabValidator."""
    validator = CrontabValidator()
    validator.validate(cron_expression)
|
||||
|
||||
|
||||
class SchedulerSettingsForm(forms.Form):
    """handle scheduler settings"""

    # cron expression fields, checked by validate_cron; all optional
    update_subscribed = forms.CharField(
        required=False, validators=[validate_cron]
    )
    download_pending = forms.CharField(
        required=False, validators=[validate_cron]
    )
    check_reindex = forms.CharField(required=False, validators=[validate_cron])
    # integer config value attached to the check_reindex task
    check_reindex_days = forms.IntegerField(required=False)
    thumbnail_check = forms.CharField(
        required=False, validators=[validate_cron]
    )
    run_backup = forms.CharField(required=False, validators=[validate_cron])
    # integer config value attached to the run_backup task
    run_backup_rotate = forms.IntegerField(required=False)
|
||||
|
||||
|
||||
class NotificationSettingsForm(forms.Form):
    """add notification URL"""

    # selectable tasks; the first entry is the empty placeholder
    TASK_CHOICES = [
        ("", "-- select task --"),
        ("update_subscribed", "Rescan your Subscriptions"),
        ("download_pending", "Downloading"),
        ("check_reindex", "Reindex Documents"),
    ]
    PLACEHOLDER = "Apprise notification URL"

    task = forms.ChoiceField(
        widget=forms.Select, choices=TASK_CHOICES, required=False
    )
    # NOTE(review): URL is not validated here — presumably apprise
    # handles invalid URLs downstream; confirm
    notification_url = forms.CharField(
        required=False,
        widget=forms.TextInput(attrs={"placeholder": PLACEHOLDER}),
    )
|
@ -0,0 +1,89 @@
|
||||
"""
|
||||
Functionality:
|
||||
- Handle scheduler config update
|
||||
"""
|
||||
|
||||
from django_celery_beat.models import CrontabSchedule
|
||||
from home.models import CustomPeriodicTask
|
||||
from home.src.ta.config import AppConfig
|
||||
from home.src.ta.settings import EnvironmentSettings
|
||||
from home.src.ta.task_config import TASK_CONFIG
|
||||
|
||||
|
||||
class ScheduleBuilder:
    """build schedule dicts for beat"""

    # default crontab per task, used when the form submits "auto";
    # format is "minute hour day_of_week"
    SCHEDULES = {
        "update_subscribed": "0 8 *",
        "download_pending": "0 16 *",
        "check_reindex": "0 12 *",
        "thumbnail_check": "0 17 *",
        "run_backup": "0 18 0",
        "version_check": "0 11 *",
    }
    # maps a form config key to the task it belongs to; the suffix after
    # "<task_name>_" becomes the key inside task.task_config
    CONFIG = {
        "check_reindex_days": "check_reindex",
        "run_backup_rotate": "run_backup",
        "update_subscribed_notify": "update_subscribed",
        "download_pending_notify": "download_pending",
        "check_reindex_notify": "check_reindex",
    }
    # NOTE(review): MSG is unused within this class — confirm callers
    MSG = "message:setting"

    def __init__(self):
        # full application config dict
        self.config = AppConfig().config

    def update_schedule_conf(self, form_post):
        """process form post, schedules need to be validated before"""
        for key, value in form_post.items():
            if not value:
                # empty form fields leave existing settings untouched
                continue

            if key in self.SCHEDULES:
                if value == "auto":
                    # fall back to the built-in default crontab
                    value = self.SCHEDULES.get(key)

                _ = self.get_set_task(key, value)
                continue

            if key in self.CONFIG:
                self.set_config(key, value)

    def get_set_task(self, task_name, schedule=False):
        """get task"""
        # fetch existing periodic task, or build a new unsaved one
        try:
            task = CustomPeriodicTask.objects.get(name=task_name)
        except CustomPeriodicTask.DoesNotExist:
            description = TASK_CONFIG[task_name].get("title")
            task = CustomPeriodicTask(
                name=task_name,
                task=task_name,
                description=description,
            )

        if schedule:
            # only persisted when a schedule string was given
            task_crontab = self.get_set_cron_tab(schedule)
            task.crontab = task_crontab
            task.save()

        return task

    @staticmethod
    def get_set_cron_tab(schedule):
        """needs to be validated before"""
        # "minute hour day_of_week" -> CrontabSchedule kwargs
        kwargs = dict(zip(["minute", "hour", "day_of_week"], schedule.split()))
        kwargs.update({"timezone": EnvironmentSettings.TZ})
        crontab, _ = CrontabSchedule.objects.get_or_create(**kwargs)

        return crontab

    def set_config(self, key, value):
        """set task_config"""
        task_name = self.CONFIG.get(key)
        if not task_name:
            raise ValueError("invalid config key")

        # task must already exist; raises DoesNotExist otherwise
        task = CustomPeriodicTask.objects.get(name=task_name)
        # e.g. "check_reindex_days" -> "days"
        config_key = key.split(f"{task_name}_")[-1]
        task.task_config.update({config_key: value})
        task.save()
|
@ -1,55 +1,141 @@
|
||||
"""send notifications using apprise"""
|
||||
|
||||
import apprise
|
||||
from home.src.ta.config import AppConfig
|
||||
from home.src.es.connect import ElasticWrap
|
||||
from home.src.ta.task_config import TASK_CONFIG
|
||||
from home.src.ta.task_manager import TaskManager
|
||||
|
||||
|
||||
class Notifications:
|
||||
"""notification handler"""
|
||||
"""store notifications in ES"""
|
||||
|
||||
def __init__(self, name: str, task_id: str, task_title: str):
|
||||
self.name: str = name
|
||||
self.task_id: str = task_id
|
||||
self.task_title: str = task_title
|
||||
GET_PATH = "ta_config/_doc/notify"
|
||||
UPDATE_PATH = "ta_config/_update/notify/"
|
||||
|
||||
def send(self) -> None:
|
||||
def __init__(self, task_name: str):
|
||||
self.task_name = task_name
|
||||
|
||||
def send(self, task_id: str, task_title: str) -> None:
|
||||
"""send notifications"""
|
||||
apobj = apprise.Apprise()
|
||||
hooks: str | None = self.get_url()
|
||||
if not hooks:
|
||||
urls: list[str] = self.get_urls()
|
||||
if not urls:
|
||||
return
|
||||
|
||||
hook_list: list[str] = self.parse_hooks(hooks=hooks)
|
||||
title, body = self.build_message()
|
||||
title, body = self._build_message(task_id, task_title)
|
||||
|
||||
if not body:
|
||||
return
|
||||
|
||||
for hook in hook_list:
|
||||
apobj.add(hook)
|
||||
for url in urls:
|
||||
apobj.add(url)
|
||||
|
||||
apobj.notify(body=body, title=title)
|
||||
|
||||
def get_url(self) -> str | None:
|
||||
"""get apprise urls for task"""
|
||||
config = AppConfig().config
|
||||
hooks: str = config["scheduler"].get(f"{self.name}_notify")
|
||||
|
||||
return hooks
|
||||
|
||||
def parse_hooks(self, hooks: str) -> list[str]:
|
||||
"""create list of hooks"""
|
||||
|
||||
hook_list: list[str] = [i.strip() for i in hooks.split()]
|
||||
|
||||
return hook_list
|
||||
|
||||
def build_message(self) -> tuple[str, str | None]:
|
||||
def _build_message(
|
||||
self, task_id: str, task_title: str
|
||||
) -> tuple[str, str | None]:
|
||||
"""build message to send notification"""
|
||||
task = TaskManager().get_task(self.task_id)
|
||||
task = TaskManager().get_task(task_id)
|
||||
status = task.get("status")
|
||||
title: str = f"[TA] {self.task_title} process ended with {status}"
|
||||
title: str = f"[TA] {task_title} process ended with {status}"
|
||||
body: str | None = task.get("result")
|
||||
|
||||
return title, body
|
||||
|
||||
def get_urls(self) -> list[str]:
    """get stored urls for task"""
    # the notify doc may not exist yet, suppress the error print
    response, code = ElasticWrap(self.GET_PATH).get(print_error=False)
    if not code == 200:
        return []

    # each task name keys its own list of apprise urls
    urls = response["_source"].get(self.task_name, [])

    return urls
|
||||
|
||||
def add_url(self, url: str) -> None:
    """add url to task notification"""
    # painless script: create the list, append if new, no-op on duplicate
    source = (
        "if (!ctx._source.containsKey(params.task_name)) "
        + "{ctx._source[params.task_name] = [params.url]} "
        + "else if (!ctx._source[params.task_name].contains(params.url)) "
        + "{ctx._source[params.task_name].add(params.url)} "
        + "else {ctx.op = 'none'}"
    )

    data = {
        "script": {
            "source": source,
            "lang": "painless",
            "params": {"url": url, "task_name": self.task_name},
        },
        # first write creates the doc with the single url
        "upsert": {self.task_name: [url]},
    }

    _, _ = ElasticWrap(self.UPDATE_PATH).post(data)
|
||||
|
||||
def remove_url(self, url: str) -> tuple[dict, int]:
    """remove url from task"""
    # painless script: delete the url from the task's list if present
    source = (
        "if (ctx._source.containsKey(params.task_name) "
        + "&& ctx._source[params.task_name].contains(params.url)) "
        + "{ctx._source[params.task_name]."
        + "remove(ctx._source[params.task_name].indexOf(params.url))}"
    )

    data = {
        "script": {
            "source": source,
            "lang": "painless",
            "params": {"url": url, "task_name": self.task_name},
        }
    }

    response, status_code = ElasticWrap(self.UPDATE_PATH).post(data)
    # drop the whole task key once its url list is empty
    if not self.get_urls():
        _, _ = self.remove_task()

    return response, status_code
|
||||
|
||||
def remove_task(self) -> tuple[dict, int]:
    """remove all notifications from task"""
    # painless script: delete the task key from the notify doc entirely
    source = (
        "if (ctx._source.containsKey(params.task_name)) "
        + "{ctx._source.remove(params.task_name)}"
    )
    data = {
        "script": {
            "source": source,
            "lang": "painless",
            "params": {"task_name": self.task_name},
        }
    }

    response, status_code = ElasticWrap(self.UPDATE_PATH).post(data)

    return response, status_code
|
||||
|
||||
|
||||
def get_all_notifications() -> dict[str, dict]:
    """Get all stored notifications.

    Returns a mapping of task name to {"urls": [...], "title": ...};
    empty dict when the notify doc is missing or has no source.

    Fix: the previous return annotation ``dict[str, list[str]]`` did not
    match the actual value shape, which is a dict per task.
    """
    path = "ta_config/_doc/notify"
    # the notify doc may not exist yet, suppress the error print
    response, status_code = ElasticWrap(path).get(print_error=False)
    if status_code != 200:
        return {}

    source = response.get("_source")
    if not source:
        return {}

    # title comes from the static task config
    return {
        task_id: {
            "urls": urls,
            "title": TASK_CONFIG[task_id]["title"],
        }
        for task_id, urls in source.items()
    }
|
||||
|
@ -0,0 +1,125 @@
|
||||
"""
|
||||
Functionality:
|
||||
- Static Task config values
|
||||
- Type definitions
|
||||
- separate to avoid circular imports
|
||||
"""
|
||||
|
||||
from typing import TypedDict
|
||||
|
||||
|
||||
class TaskItemConfig(TypedDict):
    """describes a task item config"""

    # human-readable task title
    title: str
    # namespaced group identifier like "download:scan"; may be empty
    group: str
    # NOTE(review): api_start/api_stop presumably gate starting/stopping
    # the task through the API — confirm against the API views
    api_start: bool
    api_stop: bool
|
||||
|
||||
|
||||
# static per-task config values; one TaskItemConfig per celery task
UPDATE_SUBSCRIBED: TaskItemConfig = {
    "title": "Rescan your Subscriptions",
    "group": "download:scan",
    "api_start": True,
    "api_stop": True,
}

DOWNLOAD_PENDING: TaskItemConfig = {
    "title": "Downloading",
    "group": "download:run",
    "api_start": True,
    "api_stop": True,
}

EXTRACT_DOWNLOAD: TaskItemConfig = {
    "title": "Add to download queue",
    "group": "download:add",
    "api_start": False,
    "api_stop": True,
}

CHECK_REINDEX: TaskItemConfig = {
    "title": "Reindex Documents",
    "group": "reindex:run",
    "api_start": False,
    "api_stop": False,
}

MANUAL_IMPORT: TaskItemConfig = {
    "title": "Manual video import",
    "group": "setting:import",
    "api_start": True,
    "api_stop": False,
}

RUN_BACKUP: TaskItemConfig = {
    "title": "Index Backup",
    "group": "setting:backup",
    "api_start": True,
    "api_stop": False,
}

RESTORE_BACKUP: TaskItemConfig = {
    "title": "Restore Backup",
    "group": "setting:restore",
    "api_start": False,
    "api_stop": False,
}

RESCAN_FILESYSTEM: TaskItemConfig = {
    "title": "Rescan your Filesystem",
    "group": "setting:filesystemscan",
    "api_start": True,
    "api_stop": False,
}

THUMBNAIL_CHECK: TaskItemConfig = {
    "title": "Check your Thumbnails",
    "group": "setting:thumbnailcheck",
    "api_start": True,
    "api_stop": False,
}

RESYNC_THUMBS: TaskItemConfig = {
    "title": "Sync Thumbnails to Media Files",
    "group": "setting:thumbnailsync",
    "api_start": True,
    "api_stop": False,
}

INDEX_PLAYLISTS: TaskItemConfig = {
    "title": "Index Channel Playlist",
    "group": "channel:indexplaylist",
    "api_start": False,
    "api_stop": False,
}

SUBSCRIBE_TO: TaskItemConfig = {
    "title": "Add Subscription",
    "group": "subscription:add",
    "api_start": False,
    "api_stop": False,
}

# the only task with an empty group
VERSION_CHECK: TaskItemConfig = {
    "title": "Look for new Version",
    "group": "",
    "api_start": False,
    "api_stop": False,
}

# lookup table keyed by task name
TASK_CONFIG: dict[str, TaskItemConfig] = {
    "update_subscribed": UPDATE_SUBSCRIBED,
    "download_pending": DOWNLOAD_PENDING,
    "extract_download": EXTRACT_DOWNLOAD,
    "check_reindex": CHECK_REINDEX,
    "manual_import": MANUAL_IMPORT,
    "run_backup": RUN_BACKUP,
    "restore_backup": RESTORE_BACKUP,
    "rescan_filesystem": RESCAN_FILESYSTEM,
    "thumbnail_check": THUMBNAIL_CHECK,
    "resync_thumbs": RESYNC_THUMBS,
    "index_playlists": INDEX_PLAYLISTS,
    "subscribe_to": SUBSCRIBE_TO,
    "version_check": VERSION_CHECK,
}
|
@ -1,13 +1,14 @@
|
||||
apprise==1.7.5
|
||||
celery==5.3.6
|
||||
apprise==1.7.6
|
||||
celery==5.4.0
|
||||
Django==5.0.4
|
||||
django-auth-ldap==4.8.0
|
||||
django-celery-beat==2.6.0
|
||||
django-cors-headers==4.3.1
|
||||
djangorestframework==3.15.1
|
||||
Pillow==10.3.0
|
||||
redis==5.0.0
|
||||
redis==5.0.3
|
||||
requests==2.31.0
|
||||
ryd-client==0.0.6
|
||||
uWSGI==2.0.24
|
||||
uWSGI==2.0.25.1
|
||||
whitenoise==6.6.0
|
||||
yt-dlp @ git+https://github.com/bbilly1/yt-dlp@4935eec0b4f4dffbd86d998a2d3a706875e9d761
|
||||
|
Loading…
Reference in New Issue