Compare commits
296 Commits
Author | SHA1 | Date |
---|---|---|
Simon | cfe94c8dac | 2 months ago |
Simon | e3e56a31e5 | 2 months ago |
Simon | 2ea4872bc7 | 2 months ago |
Simon | 8550421a9a | 2 months ago |
Simon | aabc883d84 | 2 months ago |
Simon | 90441ba678 | 2 months ago |
Simon | 1db738ce89 | 2 months ago |
Simon | b97cb9d264 | 2 months ago |
Simon | 12f9517838 | 3 months ago |
Simon | eba40af1ab | 3 months ago |
Simon | 807c30caa0 | 3 months ago |
Simon | 24b57406ab | 3 months ago |
Simon | b0b72a6e6e | 3 months ago |
Simon | 93b921582d | 3 months ago |
Simon | 437cd72f45 | 3 months ago |
Simon | 81bf614afa | 3 months ago |
Simon | 9b84169fab | 3 months ago |
Simon | 381f97e6d2 | 3 months ago |
Simon | 039a49387c | 3 months ago |
Simon | c9007431bf | 3 months ago |
Simon | 1fcea860c8 | 3 months ago |
Simon | 963d952dfa | 3 months ago |
Simon | cba6b0a33e | 3 months ago |
Heavybullets8 | 5fac26156b | 3 months ago |
Boo1098 | 9eba3e278d | 4 months ago |
Simon | e3d5a0885b | 4 months ago |
Simon | 9f484eca83 | 4 months ago |
Simon | e8691a8edb | 4 months ago |
Simon | 08346d6dc7 | 4 months ago |
Simon | 64016389f7 | 4 months ago |
Simon | a3afa23bc7 | 4 months ago |
Simon | 9068e4569d | 4 months ago |
Simon | 97b8439856 | 4 months ago |
Simon | d4165327df | 4 months ago |
Simon | 9fe0aa85d0 | 4 months ago |
Simon | 87e3814751 | 4 months ago |
Simon | a201d43bfd | 4 months ago |
Simon | 200f5ed5a2 | 4 months ago |
Simon | 7a265231bc | 4 months ago |
Simon | ed3183b42c | 4 months ago |
Simon | a1e3512bab | 4 months ago |
Simon | 241e76c17d | 4 months ago |
Simon | 7111fedd5c | 4 months ago |
Simon | 40beb35f34 | 4 months ago |
Simon | d7818871a4 | 4 months ago |
Simon | d3103ccc30 | 4 months ago |
Simon | 30c20b41d9 | 4 months ago |
Simon | cfa0cabd41 | 4 months ago |
Simon | 8a2040aa26 | 4 months ago |
Simon | e2253fc63b | 4 months ago |
Simon | f9eff28da7 | 4 months ago |
Simon | 873e7d1e8d | 4 months ago |
Simon | 91f9351502 | 4 months ago |
Simon | 4a49d70b17 | 4 months ago |
Simon | ca1864843a | 4 months ago |
Simon | 819d166296 | 4 months ago |
Simon | d6b5521c11 | 4 months ago |
Simon | c9285ce3a1 | 4 months ago |
Simon | 3e3e3ae78e | 4 months ago |
Simon | ae89f47072 | 4 months ago |
Simon | 1bcb4f1f7c | 4 months ago |
Simon | 758038f93d | 4 months ago |
Simon | d6f679784e | 4 months ago |
Simon | c7ebbaa207 | 4 months ago |
Simon | a2000b253b | 4 months ago |
Simon | 6e9ae583c9 | 4 months ago |
Simon | ee5d73917c | 4 months ago |
Simon | 478d05997c | 5 months ago |
Simon | 0e305f60f4 | 5 months ago |
Simon | 3c441c3a31 | 5 months ago |
Simon | 228677ddfb | 5 months ago |
Simon | dc0c82c814 | 5 months ago |
Simon | 34ad0ca07d | 5 months ago |
Simon | c82d9e2140 | 5 months ago |
Simon | bbf59eaca9 | 5 months ago |
Simon | e42737ad9b | 5 months ago |
Simon | 1d9c274390 | 5 months ago |
Simon | 2a35b42d88 | 5 months ago |
Simon | 11ab314649 | 5 months ago |
Simon | d58d133baf | 5 months ago |
Simon | d6f3fd883c | 5 months ago |
Simon | 8f1a5c8557 | 5 months ago |
Simon | fdc427977e | 5 months ago |
Simon | db080e97bb | 5 months ago |
Simon | 5235af3d91 | 5 months ago |
Simon | 6ab70c7602 | 5 months ago |
Simon | 86d157699a | 5 months ago |
Simon | c176405b32 | 5 months ago |
Simon | 6e7cb74366 | 5 months ago |
Simon | 05cfeb9d99 | 5 months ago |
Simon | 4edb5adead | 5 months ago |
Simon | c17627a911 | 5 months ago |
Simon | 36a738d5d7 | 5 months ago |
Simon | d6c4a6ea46 | 5 months ago |
Simon | 77ee9cfc13 | 5 months ago |
Simon | c413811e17 | 5 months ago |
Simon | 2a9769d154 | 5 months ago |
Simon | d9ce9641e2 | 5 months ago |
Simon | f874d402b1 | 5 months ago |
Simon | 97b6d7d606 | 5 months ago |
Simon | 4a38636ef3 | 5 months ago |
Simon | 97bc03f855 | 5 months ago |
Simon | 770990c568 | 5 months ago |
Simon | ddc4685811 | 5 months ago |
Simon | 56220a94e0 | 5 months ago |
Simon | fd039de53d | 5 months ago |
Simon | 320ead0bd2 | 5 months ago |
Simon | e33341d30d | 5 months ago |
Simon | 21b79e7c8f | 5 months ago |
Simon | 9366b8eab9 | 5 months ago |
Simon | 011073617d | 5 months ago |
Simon | 784f90b16d | 5 months ago |
Simon | c1cd9bc8eb | 5 months ago |
Simon | e0f1828d9c | 5 months ago |
Simon | f5a2e624d8 | 5 months ago |
Simon | dc08c83da5 | 5 months ago |
Simon | 33ecd73137 | 5 months ago |
Simon | cb6476fa8c | 5 months ago |
Simon | ec64a88d1e | 6 months ago |
Simon | 0c487e6339 | 6 months ago |
Simon | f7ad1000c7 | 6 months ago |
Simon | aecd189d04 | 6 months ago |
Simon | b735a770e3 | 6 months ago |
Simon | 5c84a2cbf8 | 6 months ago |
Simon | a4d062fa52 | 6 months ago |
Simon | 9c34bb01d9 | 6 months ago |
Simon | 8c38a2eb69 | 6 months ago |
Simon | 852abf254d | 6 months ago |
Simon | 25edff28e7 | 6 months ago |
lamusmaser | 731f4b6111 | 6 months ago |
Simon | e512329599 | 6 months ago |
Simon | e26b039899 | 6 months ago |
Simon | 8bf7f71351 | 6 months ago |
Simon | a72be27982 | 7 months ago |
Simon | b2c1b417e5 | 7 months ago |
Simon | a348b4a810 | 7 months ago |
Simon | bb8db53f7d | 7 months ago |
Simon | 2711537a4d | 7 months ago |
dot-mike | 45f455070d | 7 months ago |
Simon | 6dcef70b8e | 7 months ago |
Simon | c993a5de5c | 7 months ago |
Greg | 090d88c336 | 7 months ago |
Nick | 0e967d721f | 7 months ago |
Simon | c32dbf8bc8 | 7 months ago |
dot-mike | df08a6d591 | 7 months ago |
DarkFighterLuke | 9339b9227e | 7 months ago |
Simon | 8778546577 | 8 months ago |
Simon | 0ff27ebfb9 | 8 months ago |
Simon | 0d863ef557 | 8 months ago |
Simon | 56ca49d0e2 | 8 months ago |
Simon | 27b6efcab7 | 9 months ago |
Simon | 18ba808664 | 9 months ago |
Simon | 65738ef52c | 9 months ago |
Simon | 4049a2a3c1 | 9 months ago |
PhuriousGeorge | 49659322a1 | 9 months ago |
Simon | 4078eb307f | 9 months ago |
Daniel Jue | 7f056b38f4 | 9 months ago |
Simon | 86fe31d258 | 9 months ago |
Simon | 5b26433599 | 9 months ago |
Simon | 4d2fc5423e | 9 months ago |
Simon | 94295cdbd4 | 9 months ago |
Simon | b84bf78974 | 9 months ago |
Simon | 14e23a4371 | 9 months ago |
Simon | fe8f4faa10 | 9 months ago |
Simon | ddc0b7a481 | 10 months ago |
Simon | 7eec3ece49 | 10 months ago |
Simon | 789c35e2b5 | 10 months ago |
Simon | 8870782a6e | 10 months ago |
Simon | e75ffb603c | 10 months ago |
Simon | feabc87c9f | 10 months ago |
Simon | 6f1a45ffb1 | 10 months ago |
Simon | 098db97cba | 10 months ago |
Simon | 597da56975 | 10 months ago |
Simon | 325bdf5cba | 10 months ago |
Simon | db2f249979 | 10 months ago |
Simon | b61b8635b8 | 10 months ago |
Simon | 5aafc21bda | 10 months ago |
lamusmaser | 099c70a13b | 10 months ago |
Simon | 43708ee2a3 | 10 months ago |
Simon | cfb15c1a78 | 10 months ago |
Simon | e9a95d7ada | 10 months ago |
Simon | a21a111221 | 10 months ago |
Simon | 18e504faf2 | 10 months ago |
Simon | 9ffe2098a5 | 10 months ago |
Simon | 1315e836a4 | 10 months ago |
Simon | 2e4289e75c | 10 months ago |
Simon | 96e73a3a53 | 10 months ago |
Simon | a369be0f4a | 10 months ago |
Simon | d5676e5173 | 10 months ago |
Simon | 44c4cf93e2 | 10 months ago |
Simon | 02ac590caa | 10 months ago |
Simon | a466c02304 | 10 months ago |
Simon | e74c26fe36 | 10 months ago |
Simon | b1267cba83 | 10 months ago |
Simon | 91bb0ed9c0 | 10 months ago |
Simon | 4a145ee7cb | 11 months ago |
Simon | 463019ce5a | 11 months ago |
Simon | 9a9d35cac4 | 11 months ago |
Simon | f41ecd24c5 | 11 months ago |
crocs | eced8200c1 | 11 months ago |
Simon | 669bc6a620 | 11 months ago |
lamusmaser | 37df9b65c7 | 11 months ago |
lamusmaser | 6721d01fa6 | 11 months ago |
crocs | 2b49af9620 | 11 months ago |
Derek Slenk | 2f62898a10 | 11 months ago |
spechter | 832259ce48 | 11 months ago |
Simon | b8ccce250a | 11 months ago |
Simon | aa04ecff4f | 11 months ago |
Simon | dcf97d3d24 | 11 months ago |
crocs | 879ad52b32 | 11 months ago |
Simon | 0bedc3ee93 | 11 months ago |
Simon | 1657c55cbe | 11 months ago |
Simon | 8b1324139d | 11 months ago |
Simon | 04124e3dad | 11 months ago |
Simon | 9c26357f76 | 11 months ago |
extome | 7133d6b441 | 11 months ago |
Simon | 6bc0111d0a | 11 months ago |
Simon | 1188e66f37 | 11 months ago |
Simon | ef6d3e868d | 11 months ago |
Simon | d677f9579e | 11 months ago |
Simon | 0b920e87ae | 11 months ago |
Simon | 4d5aa4ad2f | 11 months ago |
Simon | 4b63c2f536 | 11 months ago |
Simon | 31ad9424f5 | 11 months ago |
Simon | 45f4ccfd93 | 11 months ago |
Simon | 285e2042ae | 11 months ago |
Simon | e4b7f8ce38 | 11 months ago |
Simon | 6892cbbc19 | 11 months ago |
Simon | 58ea256b44 | 11 months ago |
Merlin | aa475c58aa | 11 months ago |
Simon | 8247314d01 | 11 months ago |
Simon | 2826ca4a43 | 11 months ago |
Simon | 64ffc18da7 | 11 months ago |
Simon | 21fde5e068 | 11 months ago |
Simon | ea9ed6c238 | 11 months ago |
Simon | 8eaed07cff | 11 months ago |
Clark | 4d111aff82 | 11 months ago |
Simon | 7236bea29a | 11 months ago |
Simon | 5165c3e34a | 12 months ago |
Simon | 572b23169c | 12 months ago |
Steve Ovens | e1fce06f97 | 12 months ago |
Simon | 446d5b7949 | 12 months ago |
Simon | 17c0310220 | 12 months ago |
Omar Laham | 1b0be84972 | 12 months ago |
Simon | 2df68fa83c | 12 months ago |
Simon | 4184736bee | 12 months ago |
Simon | 81a5f15600 | 12 months ago |
Simon | 4a4a274259 | 12 months ago |
Simon | 0776cea7bc | 12 months ago |
Simon | fb853e6c73 | 1 year ago |
Simon | 57d8b060d2 | 1 year ago |
Simon | 6d1810946b | 1 year ago |
Simon | 88f230c3f4 | 1 year ago |
Simon | e9eddf06fb | 1 year ago |
Simon | 8af7a3caf4 | 1 year ago |
Simon | ad7f1f05b0 | 1 year ago |
Simon | e1fe8d1e29 | 1 year ago |
Simon | f8f01ac27f | 1 year ago |
Simon | 8e79cba7d5 | 1 year ago |
Simon | 87e457401d | 1 year ago |
Simon | bb271e276c | 1 year ago |
Simon | 9967015eba | 1 year ago |
Simon | 3b7e4c9266 | 1 year ago |
Xavier Chevalier | 1dd3fb9341 | 1 year ago |
Simon | 120f9e468d | 1 year ago |
Simon | 88f5c58b8e | 1 year ago |
Simon | 6bd06f61cf | 1 year ago |
Igor Rzegocki | 6a83756fb4 | 1 year ago |
Simon | 515b724047 | 1 year ago |
Simon | 77fef5de57 | 1 year ago |
Simon | 9d09d27fba | 1 year ago |
Simon | 0e767e2f84 | 1 year ago |
Simon | 7801ed0d60 | 1 year ago |
Igor Rzegocki | 6abec9401b | 1 year ago |
Simon | 1cdb9e1ad5 | 1 year ago |
Simon | 7afeb41469 | 1 year ago |
Simon | bae11fe1f1 | 1 year ago |
Simon | 0cacaee213 | 1 year ago |
Simon | dcbd8d2a55 | 1 year ago |
Simon | 892e81c185 | 1 year ago |
Igor Rzegocki | f423ddc53a | 1 year ago |
Igor Rzegocki | b2bb7ea28e | 1 year ago |
Simon | 38b3815a33 | 1 year ago |
Simon | 92975a5c95 | 1 year ago |
Joseph Liu | a5b61bfaf6 | 1 year ago |
Clark | 85b56300b3 | 1 year ago |
Kevin Gibbons | 8fa9e23c6e | 1 year ago |
Simon | a7fc7902f0 | 1 year ago |
Simon | 879f5ab52f | 1 year ago |
Simon | c6458c6ec1 | 1 year ago |
Simon | 47c433e7c1 | 1 year ago |
Simon | dc41e5062d | 1 year ago |
Merlin | 317942b7e1 | 1 year ago |
Merlin | 65d768bf02 | 1 year ago |
Merlin | 0767bbfecf | 1 year ago |
Simon | 78d6699c68 | 1 year ago |
@ -0,0 +1,23 @@
|
||||
name: Frontend Migration
|
||||
description: Tracking our new React based frontend
|
||||
title: "[Frontend Migration]: "
|
||||
labels: ["react migration"]
|
||||
|
||||
body:
|
||||
- type: dropdown
|
||||
id: domain
|
||||
attributes:
|
||||
label: Domain
|
||||
options:
|
||||
- Frontend
|
||||
- Backend
|
||||
- Combined
|
||||
validations:
|
||||
required: true
|
||||
- type: textarea
|
||||
id: description
|
||||
attributes:
|
||||
label: Description
|
||||
placeholder: Organizing our React frontend migration
|
||||
validations:
|
||||
required: true
|
@ -1,16 +1,22 @@
|
||||
name: lint_js
|
||||
|
||||
on: [pull_request, push]
|
||||
on:
|
||||
push:
|
||||
paths:
|
||||
- '**/*.js'
|
||||
pull_request:
|
||||
paths:
|
||||
- '**/*.js'
|
||||
|
||||
jobs:
|
||||
check:
|
||||
name: lint_js
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v2
|
||||
- uses: actions/setup-node@v3
|
||||
- uses: actions/checkout@v4
|
||||
- uses: actions/setup-node@v4
|
||||
with:
|
||||
node-version: '16'
|
||||
node-version: '22'
|
||||
- run: npm ci
|
||||
- run: npm run lint
|
||||
- run: npm run format -- --check
|
||||
|
@ -1,14 +1,42 @@
|
||||
name: lint_python
|
||||
on: [pull_request, push]
|
||||
|
||||
on:
|
||||
push:
|
||||
paths:
|
||||
- '**/*.py'
|
||||
pull_request:
|
||||
paths:
|
||||
- '**/*.py'
|
||||
|
||||
jobs:
|
||||
lint_python:
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v3
|
||||
- uses: actions/setup-python@v4
|
||||
- name: Checkout code
|
||||
uses: actions/checkout@v4
|
||||
|
||||
- name: Install system dependencies
|
||||
run: |
|
||||
sudo apt-get update
|
||||
sudo apt-get install -y gcc libldap2-dev libsasl2-dev libssl-dev
|
||||
|
||||
- name: Set up Python
|
||||
uses: actions/setup-python@v5
|
||||
with:
|
||||
python-version: '3.10'
|
||||
- run: pip install --upgrade pip wheel
|
||||
- run: pip install bandit black codespell flake8 flake8-bugbear
|
||||
flake8-comprehensions isort requests
|
||||
- run: ./deploy.sh validate
|
||||
python-version: '3.11'
|
||||
|
||||
- name: Cache pip
|
||||
uses: actions/cache@v4
|
||||
with:
|
||||
path: ~/.cache/pip
|
||||
key: ${{ runner.os }}-pip-${{ hashFiles('**/requirements.txt') }}
|
||||
restore-keys: |
|
||||
${{ runner.os }}-pip-
|
||||
|
||||
- name: Install python dependencies
|
||||
run: |
|
||||
python -m pip install --upgrade pip
|
||||
pip install -r tubearchivist/requirements-dev.txt
|
||||
|
||||
- name: Run Linter
|
||||
run: ./deploy.sh validate
|
||||
|
@ -0,0 +1,43 @@
|
||||
name: python_unit_tests
|
||||
|
||||
on:
|
||||
push:
|
||||
paths:
|
||||
- '**/*.py'
|
||||
pull_request:
|
||||
paths:
|
||||
- '**/*.py'
|
||||
|
||||
jobs:
|
||||
unit-tests:
|
||||
runs-on: ubuntu-latest
|
||||
|
||||
steps:
|
||||
- name: Checkout code
|
||||
uses: actions/checkout@v4
|
||||
|
||||
- name: Install system dependencies
|
||||
run: |
|
||||
sudo apt-get update
|
||||
sudo apt-get install -y gcc libldap2-dev libsasl2-dev libssl-dev
|
||||
|
||||
- name: Set up Python
|
||||
uses: actions/setup-python@v5
|
||||
with:
|
||||
python-version: '3.11'
|
||||
|
||||
- name: Cache pip
|
||||
uses: actions/cache@v4
|
||||
with:
|
||||
path: ~/.cache/pip
|
||||
key: ${{ runner.os }}-pip-${{ hashFiles('**/requirements.txt') }}
|
||||
restore-keys: |
|
||||
${{ runner.os }}-pip-
|
||||
|
||||
- name: Install dependencies
|
||||
run: |
|
||||
python -m pip install --upgrade pip
|
||||
pip install -r tubearchivist/requirements-dev.txt
|
||||
|
||||
- name: Run unit tests
|
||||
run: pytest tubearchivist
|
@ -0,0 +1,71 @@
|
||||
"""
|
||||
ffmpeg link builder
|
||||
copied as into build step in Dockerfile
|
||||
"""
|
||||
|
||||
import json
|
||||
import os
|
||||
import sys
|
||||
import tarfile
|
||||
import urllib.request
|
||||
from enum import Enum
|
||||
|
||||
API_URL = "https://api.github.com/repos/yt-dlp/FFmpeg-Builds/releases/latest"
|
||||
BINARIES = ["ffmpeg", "ffprobe"]
|
||||
|
||||
|
||||
class PlatformFilter(Enum):
|
||||
"""options"""
|
||||
|
||||
ARM64 = "linuxarm64"
|
||||
AMD64 = "linux64"
|
||||
|
||||
|
||||
def get_assets():
|
||||
"""get all available assets from latest build"""
|
||||
with urllib.request.urlopen(API_URL) as f:
|
||||
all_links = json.loads(f.read().decode("utf-8"))
|
||||
|
||||
return all_links
|
||||
|
||||
|
||||
def pick_url(all_links, platform):
|
||||
"""pick url for platform"""
|
||||
filter_by = PlatformFilter[platform.split("/")[1].upper()].value
|
||||
options = [i for i in all_links["assets"] if filter_by in i["name"]]
|
||||
if not options:
|
||||
raise ValueError(f"no valid asset found for filter {filter_by}")
|
||||
|
||||
url_pick = options[0]["browser_download_url"]
|
||||
|
||||
return url_pick
|
||||
|
||||
|
||||
def download_extract(url):
|
||||
"""download and extract binaries"""
|
||||
print("download file")
|
||||
filename, _ = urllib.request.urlretrieve(url)
|
||||
print("extract file")
|
||||
with tarfile.open(filename, "r:xz") as tar:
|
||||
for member in tar.getmembers():
|
||||
member.name = os.path.basename(member.name)
|
||||
if member.name in BINARIES:
|
||||
print(f"extract {member.name}")
|
||||
tar.extract(member, member.name)
|
||||
|
||||
|
||||
def main():
|
||||
"""entry point"""
|
||||
args = sys.argv
|
||||
if len(args) == 1:
|
||||
platform = "linux/amd64"
|
||||
else:
|
||||
platform = args[1]
|
||||
|
||||
all_links = get_assets()
|
||||
url = pick_url(all_links, platform)
|
||||
download_extract(url)
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
main()
|
@ -1,3 +0,0 @@
|
||||
All user documentation has moved to a more flexible, easier to extend and modify documentation platform accessible [here](https://docs.tubearchivist.com) and built from [here](https://github.com/tubearchivist/docs). Don't make any more changes here, keeping this around for some time to keep old links alive.
|
||||
|
||||
Equivalent Channels page is: [here](https://docs.tubearchivist.com/channels/).
|
@ -1,3 +0,0 @@
|
||||
All user documentation has moved to a more flexible, easier to extend and modify documentation platform accessible [here](https://docs.tubearchivist.com) and built from [here](https://github.com/tubearchivist/docs). Don't make any more changes here, keeping this around for some time to keep old links alive.
|
||||
|
||||
Equivalent Downloads page is: [here](https://docs.tubearchivist.com/downloads/).
|
@ -1,3 +0,0 @@
|
||||
All user documentation has moved to a more flexible, easier to extend and modify documentation platform accessible [here](https://docs.tubearchivist.com) and built from [here](https://github.com/tubearchivist/docs). Don't make any more changes here, keeping this around for some time to keep old links alive.
|
||||
|
||||
Equivalent FAQ page is: [here](https://docs.tubearchivist.com/faq/).
|
@ -1 +0,0 @@
|
||||
All user documentation has moved to a more flexible, easier to extend and modify documentation platform accessible [here](https://docs.tubearchivist.com) and built from [here](https://github.com/tubearchivist/docs). Don't make any more changes here, keeping this around for some time to keep old links alive.
|
@ -1,3 +0,0 @@
|
||||
All user documentation has moved to a more flexible, easier to extend and modify documentation platform accessible [here](https://docs.tubearchivist.com) and built from [here](https://github.com/tubearchivist/docs). Don't make any more changes here, keeping this around for some time to keep old links alive.
|
||||
|
||||
Equivalent pages are located under *installation* on the left.
|
@ -1,3 +0,0 @@
|
||||
All user documentation has moved to a more flexible, easier to extend and modify documentation platform accessible [here](https://docs.tubearchivist.com) and built from [here](https://github.com/tubearchivist/docs). Don't make any more changes here, keeping this around for some time to keep old links alive.
|
||||
|
||||
Equivalent Playlist page is: [here](https://docs.tubearchivist.com/playlists/).
|
@ -1,3 +0,0 @@
|
||||
All user documentation has moved to a more flexible, easier to extend and modify documentation platform accessible [here](https://docs.tubearchivist.com) and built from [here](https://github.com/tubearchivist/docs). Don't make any more changes here, keeping this around for some time to keep old links alive.
|
||||
|
||||
Equivalent Search page is: [here](https://docs.tubearchivist.com/search/).
|
@ -1,3 +0,0 @@
|
||||
All user documentation has moved to a more flexible, easier to extend and modify documentation platform accessible [here](https://docs.tubearchivist.com) and built from [here](https://github.com/tubearchivist/docs). Don't make any more changes here, keeping this around for some time to keep old links alive.
|
||||
|
||||
Equivalent Settings page is: [here](https://docs.tubearchivist.com/settings/).
|
@ -1,3 +0,0 @@
|
||||
All user documentation has moved to a more flexible, easier to extend and modify documentation platform accessible [here](https://docs.tubearchivist.com) and built from [here](https://github.com/tubearchivist/docs). Don't make any more changes here, keeping this around for some time to keep old links alive.
|
||||
|
||||
Equivalent Users page is: [here](https://docs.tubearchivist.com/users/).
|
@ -1,3 +0,0 @@
|
||||
All user documentation has moved to a more flexible, easier to extend and modify documentation platform accessible [here](https://docs.tubearchivist.com) and built from [here](https://github.com/tubearchivist/docs). Don't make any more changes here, keeping this around for some time to keep old links alive.
|
||||
|
||||
Equivalent Video page is: [here](https://docs.tubearchivist.com/video/).
|
@ -0,0 +1,44 @@
|
||||
"""
|
||||
channel fix for update from v0.4.7 to v0.4.8
|
||||
reindex channels with 0 subscriber count
|
||||
python manage.py ta_fix_channels
|
||||
"""
|
||||
|
||||
from django.core.management.base import BaseCommand
|
||||
from home.src.es.connect import IndexPaginate
|
||||
from home.tasks import check_reindex
|
||||
|
||||
|
||||
class Command(BaseCommand):
|
||||
"""fix comment link"""
|
||||
|
||||
def handle(self, *args, **options):
|
||||
"""run command"""
|
||||
self.stdout.write("reindex failed channels")
|
||||
channels = self._get_channels()
|
||||
if not channels:
|
||||
self.stdout.write("did not find any failed channels")
|
||||
return
|
||||
|
||||
self.stdout.write(f"add {len(channels)} channels(s) to queue")
|
||||
to_reindex = {"channel": [i["channel_id"] for i in channels]}
|
||||
check_reindex.delay(data=to_reindex)
|
||||
self.stdout.write(self.style.SUCCESS(" ✓ task queued\n"))
|
||||
|
||||
def _get_channels(self):
|
||||
"""get failed channels"""
|
||||
self.stdout.write("search for failed channels")
|
||||
es_query = {
|
||||
"query": {
|
||||
"bool": {
|
||||
"must": [
|
||||
{"term": {"channel_subs": {"value": 0}}},
|
||||
{"term": {"channel_active": {"value": True}}},
|
||||
]
|
||||
},
|
||||
},
|
||||
"_source": ["channel_id"],
|
||||
}
|
||||
channels = IndexPaginate("ta_channel", es_query).get_results()
|
||||
|
||||
return channels
|
@ -0,0 +1,76 @@
|
||||
"""
|
||||
comment link fix for update from v0.4.7 to v0.4.8
|
||||
scan your videos and comments to fix comment_count field
|
||||
python manage.py ta_fix_comment_link
|
||||
"""
|
||||
|
||||
from django.core.management.base import BaseCommand, CommandError
|
||||
from home.src.es.connect import ElasticWrap, IndexPaginate
|
||||
|
||||
|
||||
class Command(BaseCommand):
|
||||
"""fix comment link"""
|
||||
|
||||
def handle(self, *args, **options):
|
||||
"""run command"""
|
||||
self.stdout.write("run comment link fix")
|
||||
expected_count = self._get_comment_indexed()
|
||||
all_videos = self._get_videos()
|
||||
|
||||
self.stdout.write(f"checking {len(all_videos)} video(s)")
|
||||
videos_updated = []
|
||||
for video in all_videos:
|
||||
video_id = video["youtube_id"]
|
||||
comment_count = expected_count.get(video_id)
|
||||
if not comment_count:
|
||||
continue
|
||||
|
||||
data = {"doc": {"comment_count": comment_count}}
|
||||
path = f"ta_video/_update/{video_id}"
|
||||
response, status_code = ElasticWrap(path).post(data=data)
|
||||
|
||||
if status_code != 200:
|
||||
message = (
|
||||
"failed to add comment count to video"
|
||||
+ f"response code: {status_code}"
|
||||
+ response
|
||||
)
|
||||
raise CommandError(message)
|
||||
|
||||
videos_updated.append(video_id)
|
||||
|
||||
self.stdout.write(f"fixed {len(videos_updated)} video(s)")
|
||||
self.stdout.write(self.style.SUCCESS(" ✓ task completed\n"))
|
||||
|
||||
def _get_comment_indexed(self):
|
||||
"""get comment count by index"""
|
||||
self.stdout.write("get comments")
|
||||
src = "params['_source']['comment_comments'].length"
|
||||
data = {
|
||||
"script_fields": {
|
||||
"comments_length": {
|
||||
"script": {"source": src, "lang": "painless"}
|
||||
}
|
||||
}
|
||||
}
|
||||
all_comments = IndexPaginate(
|
||||
"ta_comment", data=data, keep_source=True
|
||||
).get_results()
|
||||
|
||||
expected_count = {
|
||||
i["_id"]: i["fields"]["comments_length"][0] for i in all_comments
|
||||
}
|
||||
|
||||
return expected_count
|
||||
|
||||
def _get_videos(self):
|
||||
"""get videos without comment_count"""
|
||||
self.stdout.write("get videos")
|
||||
data = {
|
||||
"query": {
|
||||
"bool": {"must_not": [{"exists": {"field": "comment_count"}}]}
|
||||
}
|
||||
}
|
||||
all_videos = IndexPaginate("ta_video", data).get_results()
|
||||
|
||||
return all_videos
|
@ -1,5 +1,7 @@
|
||||
""" handle celery startup """
|
||||
"""start celery app"""
|
||||
|
||||
from .tasks import app as celery_app
|
||||
from __future__ import absolute_import, unicode_literals
|
||||
|
||||
from home.celery import app as celery_app
|
||||
|
||||
__all__ = ("celery_app",)
|
||||
|
@ -0,0 +1,22 @@
|
||||
"""initiate celery"""
|
||||
|
||||
import os
|
||||
|
||||
from celery import Celery
|
||||
from home.src.ta.settings import EnvironmentSettings
|
||||
|
||||
REDIS_HOST = EnvironmentSettings.REDIS_HOST
|
||||
REDIS_PORT = EnvironmentSettings.REDIS_PORT
|
||||
|
||||
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "config.settings")
|
||||
app = Celery(
|
||||
"tasks",
|
||||
broker=f"redis://{REDIS_HOST}:{REDIS_PORT}",
|
||||
backend=f"redis://{REDIS_HOST}:{REDIS_PORT}",
|
||||
result_extended=True,
|
||||
)
|
||||
app.config_from_object(
|
||||
"django.conf:settings", namespace=EnvironmentSettings.REDIS_NAME_SPACE
|
||||
)
|
||||
app.autodiscover_tasks()
|
||||
app.conf.timezone = EnvironmentSettings.TZ
|
@ -0,0 +1,23 @@
|
||||
# Generated by Django 4.2.7 on 2023-12-05 13:47
|
||||
|
||||
from django.db import migrations, models
|
||||
import django.db.models.deletion
|
||||
|
||||
|
||||
class Migration(migrations.Migration):
|
||||
|
||||
dependencies = [
|
||||
('django_celery_beat', '0018_improve_crontab_helptext'),
|
||||
('home', '0001_initial'),
|
||||
]
|
||||
|
||||
operations = [
|
||||
migrations.CreateModel(
|
||||
name='CustomPeriodicTask',
|
||||
fields=[
|
||||
('periodictask_ptr', models.OneToOneField(auto_created=True, on_delete=django.db.models.deletion.CASCADE, parent_link=True, primary_key=True, serialize=False, to='django_celery_beat.periodictask')),
|
||||
('task_config', models.JSONField(default=dict)),
|
||||
],
|
||||
bases=('django_celery_beat.periodictask',),
|
||||
),
|
||||
]
|
@ -1,104 +0,0 @@
|
||||
"""
|
||||
Functionality:
|
||||
- collection of functions and tasks from frontend
|
||||
- called via user input
|
||||
"""
|
||||
|
||||
from home.src.ta.ta_redis import RedisArchivist
|
||||
from home.tasks import run_restore_backup
|
||||
|
||||
|
||||
class PostData:
|
||||
"""
|
||||
map frontend http post values to backend funcs
|
||||
handover long running tasks to celery
|
||||
"""
|
||||
|
||||
def __init__(self, post_dict, current_user):
|
||||
self.post_dict = post_dict
|
||||
self.to_exec, self.exec_val = list(post_dict.items())[0]
|
||||
self.current_user = current_user
|
||||
|
||||
def run_task(self):
|
||||
"""execute and return task result"""
|
||||
to_exec = self.exec_map()
|
||||
task_result = to_exec()
|
||||
return task_result
|
||||
|
||||
def exec_map(self):
|
||||
"""map dict key and return function to execute"""
|
||||
exec_map = {
|
||||
"change_view": self._change_view,
|
||||
"change_grid": self._change_grid,
|
||||
"sort_order": self._sort_order,
|
||||
"hide_watched": self._hide_watched,
|
||||
"show_subed_only": self._show_subed_only,
|
||||
"show_ignored_only": self._show_ignored_only,
|
||||
"db-restore": self._db_restore,
|
||||
}
|
||||
|
||||
return exec_map[self.to_exec]
|
||||
|
||||
def _change_view(self):
|
||||
"""process view changes in home, channel, and downloads"""
|
||||
origin, new_view = self.exec_val.split(":")
|
||||
key = f"{self.current_user}:view:{origin}"
|
||||
print(f"change view: {key} to {new_view}")
|
||||
RedisArchivist().set_message(key, {"status": new_view})
|
||||
return {"success": True}
|
||||
|
||||
def _change_grid(self):
|
||||
"""process change items in grid"""
|
||||
grid_items = int(self.exec_val)
|
||||
grid_items = max(grid_items, 3)
|
||||
grid_items = min(grid_items, 7)
|
||||
|
||||
key = f"{self.current_user}:grid_items"
|
||||
print(f"change grid items: {grid_items}")
|
||||
RedisArchivist().set_message(key, {"status": grid_items})
|
||||
return {"success": True}
|
||||
|
||||
def _sort_order(self):
|
||||
"""change the sort between published to downloaded"""
|
||||
sort_order = {"status": self.exec_val}
|
||||
if self.exec_val in ["asc", "desc"]:
|
||||
RedisArchivist().set_message(
|
||||
f"{self.current_user}:sort_order", sort_order
|
||||
)
|
||||
else:
|
||||
RedisArchivist().set_message(
|
||||
f"{self.current_user}:sort_by", sort_order
|
||||
)
|
||||
return {"success": True}
|
||||
|
||||
def _hide_watched(self):
|
||||
"""toggle if to show watched vids or not"""
|
||||
key = f"{self.current_user}:hide_watched"
|
||||
message = {"status": bool(int(self.exec_val))}
|
||||
print(f"toggle {key}: {message}")
|
||||
RedisArchivist().set_message(key, message)
|
||||
return {"success": True}
|
||||
|
||||
def _show_subed_only(self):
|
||||
"""show or hide subscribed channels only on channels page"""
|
||||
key = f"{self.current_user}:show_subed_only"
|
||||
message = {"status": bool(int(self.exec_val))}
|
||||
print(f"toggle {key}: {message}")
|
||||
RedisArchivist().set_message(key, message)
|
||||
return {"success": True}
|
||||
|
||||
def _show_ignored_only(self):
|
||||
"""switch view on /downloads/ to show ignored only"""
|
||||
show_value = self.exec_val
|
||||
key = f"{self.current_user}:show_ignored_only"
|
||||
value = {"status": show_value}
|
||||
print(f"Filter download view ignored only: {show_value}")
|
||||
RedisArchivist().set_message(key, value)
|
||||
return {"success": True}
|
||||
|
||||
def _db_restore(self):
|
||||
"""restore es zip from settings page"""
|
||||
print("restoring index from backup zip")
|
||||
filename = self.exec_val
|
||||
run_restore_backup.delay(filename)
|
||||
return {"success": True}
|
@ -0,0 +1,101 @@
|
||||
"""
|
||||
Functionality:
|
||||
- handle schedule forms
|
||||
- implement form validation
|
||||
"""
|
||||
|
||||
from celery.schedules import crontab
|
||||
from django import forms
|
||||
from home.src.ta.task_config import TASK_CONFIG
|
||||
|
||||
|
||||
class CrontabValidator:
|
||||
"""validate crontab"""
|
||||
|
||||
@staticmethod
|
||||
def validate_fields(cron_fields):
|
||||
"""expect 3 cron fields"""
|
||||
if not len(cron_fields) == 3:
|
||||
raise forms.ValidationError("expected three cron schedule fields")
|
||||
|
||||
@staticmethod
|
||||
def validate_minute(minute_field):
|
||||
"""expect minute int"""
|
||||
try:
|
||||
minute_value = int(minute_field)
|
||||
if not 0 <= minute_value <= 59:
|
||||
raise forms.ValidationError(
|
||||
"Invalid value for minutes. Must be between 0 and 59."
|
||||
)
|
||||
except ValueError as err:
|
||||
raise forms.ValidationError(
|
||||
"Invalid value for minutes. Must be an integer."
|
||||
) from err
|
||||
|
||||
@staticmethod
|
||||
def validate_cron_tab(minute, hour, day_of_week):
|
||||
"""check if crontab can be created"""
|
||||
try:
|
||||
crontab(minute=minute, hour=hour, day_of_week=day_of_week)
|
||||
except ValueError as err:
|
||||
raise forms.ValidationError(f"invalid crontab: {err}") from err
|
||||
|
||||
def validate(self, cron_expression):
|
||||
"""create crontab schedule"""
|
||||
if cron_expression == "auto":
|
||||
return
|
||||
|
||||
cron_fields = cron_expression.split()
|
||||
self.validate_fields(cron_fields)
|
||||
|
||||
minute, hour, day_of_week = cron_fields
|
||||
self.validate_minute(minute)
|
||||
self.validate_cron_tab(minute, hour, day_of_week)
|
||||
|
||||
|
||||
def validate_cron(cron_expression):
|
||||
"""callable for field"""
|
||||
CrontabValidator().validate(cron_expression)
|
||||
|
||||
|
||||
class SchedulerSettingsForm(forms.Form):
    """handle scheduler settings

    Each CharField takes a cron expression "<minute> <hour> <day_of_week>"
    or the literal "auto", checked by validate_cron. All fields are
    optional; empty values are skipped by the consumer of the form.
    """

    update_subscribed = forms.CharField(
        required=False, validators=[validate_cron]
    )
    download_pending = forms.CharField(
        required=False, validators=[validate_cron]
    )
    check_reindex = forms.CharField(required=False, validators=[validate_cron])
    # integer companion value for check_reindex — presumably a day
    # threshold; confirm against the schedule consumer
    check_reindex_days = forms.IntegerField(required=False)
    thumbnail_check = forms.CharField(
        required=False, validators=[validate_cron]
    )
    run_backup = forms.CharField(required=False, validators=[validate_cron])
    # integer companion value for run_backup — presumably how many
    # backups to keep; confirm against the schedule consumer
    run_backup_rotate = forms.IntegerField(required=False)
|
||||
|
||||
|
||||
class NotificationSettingsForm(forms.Form):
    """add an apprise notification URL for a selected task"""

    # tasks that can trigger notifications
    SUPPORTED_TASKS = [
        "update_subscribed",
        "extract_download",
        "download_pending",
        "check_reindex",
    ]
    # (value, label) choice pairs built from the central task config
    TASK_LIST = [(i, TASK_CONFIG[i]["title"]) for i in SUPPORTED_TASKS]

    # empty first choice acts as the "nothing selected" placeholder
    TASK_CHOICES = [("", "-- select task --")]
    TASK_CHOICES.extend(TASK_LIST)

    PLACEHOLDER = "Apprise notification URL"

    task = forms.ChoiceField(
        widget=forms.Select, choices=TASK_CHOICES, required=False
    )
    notification_url = forms.CharField(
        required=False,
        widget=forms.TextInput(attrs={"placeholder": PLACEHOLDER}),
    )
|
@ -0,0 +1,10 @@
|
||||
from django.conf import settings
|
||||
from django.contrib.auth.middleware import PersistentRemoteUserMiddleware
|
||||
|
||||
|
||||
class HttpRemoteUserMiddleware(PersistentRemoteUserMiddleware):
    """This class allows authentication via HTTP_REMOTE_USER which is set for
    example by certain SSO applications.
    """

    # which request header carries the authenticated username is
    # configurable via Django settings
    header = settings.TA_AUTH_PROXY_USERNAME_HEADER
|
@ -0,0 +1,93 @@
|
||||
"""
|
||||
Functionality:
|
||||
- Handle scheduler config update
|
||||
"""
|
||||
|
||||
from datetime import datetime
|
||||
|
||||
from django.utils import dateformat
|
||||
from django_celery_beat.models import CrontabSchedule
|
||||
from home.models import CustomPeriodicTask
|
||||
from home.src.ta.config import AppConfig
|
||||
from home.src.ta.settings import EnvironmentSettings
|
||||
from home.src.ta.task_config import TASK_CONFIG
|
||||
|
||||
|
||||
class ScheduleBuilder:
    """build schedule dicts for beat

    Translates validated form posts into CustomPeriodicTask rows and
    their CrontabSchedule, plus per-task config values.
    """

    # default cron expressions "<minute> <hour> <day_of_week>", applied
    # when the form submits the value "auto" for a schedule key
    SCHEDULES = {
        "update_subscribed": "0 8 *",
        "download_pending": "0 16 *",
        "check_reindex": "0 12 *",
        "thumbnail_check": "0 17 *",
        "run_backup": "0 18 0",
        "version_check": "0 11 *",
    }
    # form keys holding per-task config values -> name of the owning task
    CONFIG = {
        "check_reindex_days": "check_reindex",
        "run_backup_rotate": "run_backup",
        "update_subscribed_notify": "update_subscribed",
        "download_pending_notify": "download_pending",
        "check_reindex_notify": "check_reindex",
    }
    # NOTE(review): unused within this class — presumably a message key
    # prefix consumed elsewhere; confirm before removing
    MSG = "message:setting"

    def __init__(self):
        self.config = AppConfig().config

    def update_schedule_conf(self, form_post):
        """process form post, schedules need to be validated before"""
        for key, value in form_post.items():
            if not value:
                # empty form fields mean "leave unchanged"
                continue

            if key in self.SCHEDULES:
                if value == "auto":
                    value = self.SCHEDULES.get(key)

                _ = self.get_set_task(key, value)
                continue

            if key in self.CONFIG:
                self.set_config(key, value)

    def get_set_task(self, task_name, schedule=False):
        """get task by name, create if missing; optionally set its crontab

        schedule: validated cron expression string, or False to leave
        the existing crontab untouched.
        """
        try:
            task = CustomPeriodicTask.objects.get(name=task_name)
        except CustomPeriodicTask.DoesNotExist:
            description = TASK_CONFIG[task_name].get("title")
            task = CustomPeriodicTask(
                name=task_name,
                task=task_name,
                description=description,
            )

        if schedule:
            task_crontab = self.get_set_cron_tab(schedule)
            task.crontab = task_crontab
            # stamp last_run_at with "now" — presumably so beat does not
            # fire a catch-up run immediately; confirm
            task.last_run_at = dateformat.make_aware(datetime.now())
            task.save()

        return task

    @staticmethod
    def get_set_cron_tab(schedule):
        """get or create the CrontabSchedule row, needs to be validated before"""
        # map "m h dow" onto the CrontabSchedule field names
        kwargs = dict(zip(["minute", "hour", "day_of_week"], schedule.split()))
        kwargs.update({"timezone": EnvironmentSettings.TZ})
        crontab, _ = CrontabSchedule.objects.get_or_create(**kwargs)

        return crontab

    def set_config(self, key, value):
        """store a task_config value on the owning CustomPeriodicTask"""
        task_name = self.CONFIG.get(key)
        if not task_name:
            raise ValueError("invalid config key")

        task = CustomPeriodicTask.objects.get(name=task_name)
        # strip the "<task_name>_" prefix: "check_reindex_days" -> "days"
        config_key = key.split(f"{task_name}_")[-1]
        task.task_config.update({config_key: value})
        task.save()
|
@ -0,0 +1,11 @@
|
||||
from django.http import HttpResponse
|
||||
|
||||
|
||||
class HealthCheckMiddleware:
    """Answer /health requests with a plain "ok", short-circuiting the
    rest of the middleware and view stack; all other paths pass through."""

    def __init__(self, get_response):
        self.get_response = get_response

    def __call__(self, request):
        if request.path != "/health":
            return self.get_response(request)

        return HttpResponse("ok")
|
@ -1,55 +1,141 @@
|
||||
"""send notifications using apprise"""
|
||||
|
||||
import apprise
|
||||
from home.src.ta.config import AppConfig
|
||||
from home.src.es.connect import ElasticWrap
|
||||
from home.src.ta.task_config import TASK_CONFIG
|
||||
from home.src.ta.task_manager import TaskManager
|
||||
|
||||
|
||||
class Notifications:
|
||||
"""notification handler"""
|
||||
"""store notifications in ES"""
|
||||
|
||||
def __init__(self, name: str, task_id: str, task_title: str):
|
||||
self.name: str = name
|
||||
self.task_id: str = task_id
|
||||
self.task_title: str = task_title
|
||||
GET_PATH = "ta_config/_doc/notify"
|
||||
UPDATE_PATH = "ta_config/_update/notify/"
|
||||
|
||||
def send(self) -> None:
|
||||
def __init__(self, task_name: str):
|
||||
self.task_name = task_name
|
||||
|
||||
def send(self, task_id: str, task_title: str) -> None:
|
||||
"""send notifications"""
|
||||
apobj = apprise.Apprise()
|
||||
hooks: str | None = self.get_url()
|
||||
if not hooks:
|
||||
urls: list[str] = self.get_urls()
|
||||
if not urls:
|
||||
return
|
||||
|
||||
hook_list: list[str] = self.parse_hooks(hooks=hooks)
|
||||
title, body = self.build_message()
|
||||
title, body = self._build_message(task_id, task_title)
|
||||
|
||||
if not body:
|
||||
return
|
||||
|
||||
for hook in hook_list:
|
||||
apobj.add(hook)
|
||||
for url in urls:
|
||||
apobj.add(url)
|
||||
|
||||
apobj.notify(body=body, title=title)
|
||||
|
||||
def get_url(self) -> str | None:
|
||||
"""get apprise urls for task"""
|
||||
config = AppConfig().config
|
||||
hooks: str = config["scheduler"].get(f"{self.name}_notify")
|
||||
|
||||
return hooks
|
||||
|
||||
def parse_hooks(self, hooks: str) -> list[str]:
|
||||
"""create list of hooks"""
|
||||
|
||||
hook_list: list[str] = [i.strip() for i in hooks.split()]
|
||||
|
||||
return hook_list
|
||||
|
||||
def build_message(self) -> tuple[str, str | None]:
|
||||
def _build_message(
|
||||
self, task_id: str, task_title: str
|
||||
) -> tuple[str, str | None]:
|
||||
"""build message to send notification"""
|
||||
task = TaskManager().get_task(self.task_id)
|
||||
task = TaskManager().get_task(task_id)
|
||||
status = task.get("status")
|
||||
title: str = f"[TA] {self.task_title} process ended with {status}"
|
||||
title: str = f"[TA] {task_title} process ended with {status}"
|
||||
body: str | None = task.get("result")
|
||||
|
||||
return title, body
|
||||
|
||||
def get_urls(self) -> list[str]:
    """return the apprise urls stored for this task, [] when none stored"""
    response, status_code = ElasticWrap(self.GET_PATH).get(print_error=False)
    if status_code != 200:
        return []

    return response["_source"].get(self.task_name, [])
|
||||
|
||||
def add_url(self, url: str) -> None:
    """add url to task notification

    Painless scripted upsert: create the url list for this task if the
    key is missing, append the url only when not already present,
    otherwise turn the update into a no-op.
    """
    source = (
        "if (!ctx._source.containsKey(params.task_name)) "
        + "{ctx._source[params.task_name] = [params.url]} "
        + "else if (!ctx._source[params.task_name].contains(params.url)) "
        + "{ctx._source[params.task_name].add(params.url)} "
        + "else {ctx.op = 'none'}"
    )

    data = {
        "script": {
            "source": source,
            "lang": "painless",
            "params": {"url": url, "task_name": self.task_name},
        },
        # creates the notify document when it does not exist yet
        "upsert": {self.task_name: [url]},
    }

    _, _ = ElasticWrap(self.UPDATE_PATH).post(data)
|
||||
|
||||
def remove_url(self, url: str) -> tuple[dict, int]:
    """remove url from task

    Returns the ES response and status code of the update. When the
    task has no urls left afterwards, the whole task key is removed.
    """
    source = (
        "if (ctx._source.containsKey(params.task_name) "
        + "&& ctx._source[params.task_name].contains(params.url)) "
        + "{ctx._source[params.task_name]."
        + "remove(ctx._source[params.task_name].indexOf(params.url))}"
    )

    data = {
        "script": {
            "source": source,
            "lang": "painless",
            "params": {"url": url, "task_name": self.task_name},
        }
    }

    response, status_code = ElasticWrap(self.UPDATE_PATH).post(data)
    if not self.get_urls():
        # last url removed: drop the now-empty task key entirely
        _, _ = self.remove_task()

    return response, status_code
|
||||
|
||||
def remove_task(self) -> tuple[dict, int]:
    """remove all notifications from task

    Deletes the task's key from the notify document via a painless
    script; returns the ES response and status code.
    """
    source = (
        "if (ctx._source.containsKey(params.task_name)) "
        + "{ctx._source.remove(params.task_name)}"
    )
    data = {
        "script": {
            "source": source,
            "lang": "painless",
            "params": {"task_name": self.task_name},
        }
    }

    response, status_code = ElasticWrap(self.UPDATE_PATH).post(data)

    return response, status_code
|
||||
|
||||
|
||||
def get_all_notifications() -> dict[str, dict]:
    """get all notifications stored in ES, keyed by task name

    Each value is a dict with the list of apprise "urls" and the task
    "title" from TASK_CONFIG. Returns {} when the notify document does
    not exist or holds no source.

    Note: the original return annotation dict[str, list[str]] was wrong;
    the values are dicts, not url lists.
    """
    path = "ta_config/_doc/notify"
    response, status_code = ElasticWrap(path).get(print_error=False)
    if status_code != 200:
        return {}

    source = response.get("_source")
    if not source:
        return {}

    return {
        task_name: {
            "urls": urls,
            "title": TASK_CONFIG[task_name]["title"],
        }
        for task_name, urls in source.items()
    }
|
||||
|
@ -0,0 +1,95 @@
|
||||
"""
|
||||
Functionality:
|
||||
- read and write application config backed by ES
|
||||
- encapsulate persistence of application properties
|
||||
"""
|
||||
|
||||
from os import environ
|
||||
|
||||
|
||||
class EnvironmentSettings:
    """
    Handle settings for the application that are driven from the environment.
    These will not change when the user is using the application.
    These settings are only provided only on startup.
    """

    # NOTE(review): int(environ.get(..., False)) evaluates to 0 when the
    # variable is unset, since int(False) == 0 — confirm 0 is a valid
    # "unset" marker for uid/gid/ports
    HOST_UID: int = int(environ.get("HOST_UID", False))
    HOST_GID: int = int(environ.get("HOST_GID", False))
    # any non-empty string enables casting
    ENABLE_CAST: bool = bool(environ.get("ENABLE_CAST"))
    TZ: str = str(environ.get("TZ", "UTC"))
    TA_PORT: int = int(environ.get("TA_PORT", False))
    TA_UWSGI_PORT: int = int(environ.get("TA_UWSGI_PORT", False))
    # NOTE(review): str(None) becomes the literal "None" when unset —
    # presumably validated elsewhere on startup; confirm
    TA_USERNAME: str = str(environ.get("TA_USERNAME"))
    TA_PASSWORD: str = str(environ.get("TA_PASSWORD"))

    # Application Paths
    MEDIA_DIR: str = str(environ.get("TA_MEDIA_DIR", "/youtube"))
    APP_DIR: str = str(environ.get("TA_APP_DIR", "/app"))
    CACHE_DIR: str = str(environ.get("TA_CACHE_DIR", "/cache"))

    # Redis
    REDIS_HOST: str = str(environ.get("REDIS_HOST"))
    REDIS_PORT: int = int(environ.get("REDIS_PORT", 6379))
    REDIS_NAME_SPACE: str = str(environ.get("REDIS_NAME_SPACE", "ta:"))

    # ElasticSearch
    ES_URL: str = str(environ.get("ES_URL"))
    ES_PASS: str = str(environ.get("ELASTIC_PASSWORD"))
    ES_USER: str = str(environ.get("ELASTIC_USER", "elastic"))
    ES_SNAPSHOT_DIR: str = str(
        environ.get(
            "ES_SNAPSHOT_DIR", "/usr/share/elasticsearch/data/snapshot"
        )
    )
    ES_DISABLE_VERIFY_SSL: bool = bool(environ.get("ES_DISABLE_VERIFY_SSL"))

    def print_generic(self):
        """print generic env vars, password is masked"""
        print(
            f"""
HOST_UID: {self.HOST_UID}
HOST_GID: {self.HOST_GID}
TZ: {self.TZ}
ENABLE_CAST: {self.ENABLE_CAST}
TA_PORT: {self.TA_PORT}
TA_UWSGI_PORT: {self.TA_UWSGI_PORT}
TA_USERNAME: {self.TA_USERNAME}
TA_PASSWORD: *****"""
        )

    def print_paths(self):
        """debug paths set"""
        print(
            f"""
MEDIA_DIR: {self.MEDIA_DIR}
APP_DIR: {self.APP_DIR}
CACHE_DIR: {self.CACHE_DIR}"""
        )

    def print_redis_conf(self):
        """debug redis conf paths"""
        print(
            f"""
REDIS_HOST: {self.REDIS_HOST}
REDIS_PORT: {self.REDIS_PORT}
REDIS_NAME_SPACE: {self.REDIS_NAME_SPACE}"""
        )

    def print_es_paths(self):
        """debug es conf, password is masked"""
        print(
            f"""
ES_URL: {self.ES_URL}
ES_PASS: *****
ES_USER: {self.ES_USER}
ES_SNAPSHOT_DIR: {self.ES_SNAPSHOT_DIR}
ES_DISABLE_VERIFY_SSL: {self.ES_DISABLE_VERIFY_SSL}"""
        )

    def print_all(self):
        """print all environment settings groups"""
        self.print_generic()
        self.print_paths()
        self.print_redis_conf()
        self.print_es_paths()
|
@ -0,0 +1,125 @@
|
||||
"""
|
||||
Functionality:
|
||||
- Static Task config values
|
||||
- Type definitions
|
||||
- separate to avoid circular imports
|
||||
"""
|
||||
|
||||
from typing import TypedDict
|
||||
|
||||
|
||||
class TaskItemConfig(TypedDict):
    """describes a task item config"""

    # human readable task name, shown e.g. in form choices
    title: str
    # "<scope>:<action>" identifier — presumably a message group key;
    # confirm against the message consumers
    group: str
    # whether the task may be started via the API
    api_start: bool
    # whether the task may be stopped via the API
    api_stop: bool
|
||||
|
||||
|
||||
# --- per-task static config items, collected in TASK_CONFIG below ---

UPDATE_SUBSCRIBED: TaskItemConfig = {
    "title": "Rescan your Subscriptions",
    "group": "download:scan",
    "api_start": True,
    "api_stop": True,
}

DOWNLOAD_PENDING: TaskItemConfig = {
    "title": "Downloading",
    "group": "download:run",
    "api_start": True,
    "api_stop": True,
}

EXTRACT_DOWNLOAD: TaskItemConfig = {
    "title": "Add to download queue",
    "group": "download:add",
    "api_start": False,
    "api_stop": True,
}

CHECK_REINDEX: TaskItemConfig = {
    "title": "Reindex Documents",
    "group": "reindex:run",
    "api_start": False,
    "api_stop": False,
}

MANUAL_IMPORT: TaskItemConfig = {
    "title": "Manual video import",
    "group": "setting:import",
    "api_start": True,
    "api_stop": False,
}

RUN_BACKUP: TaskItemConfig = {
    "title": "Index Backup",
    "group": "setting:backup",
    "api_start": True,
    "api_stop": False,
}

RESTORE_BACKUP: TaskItemConfig = {
    "title": "Restore Backup",
    "group": "setting:restore",
    "api_start": False,
    "api_stop": False,
}

RESCAN_FILESYSTEM: TaskItemConfig = {
    "title": "Rescan your Filesystem",
    "group": "setting:filesystemscan",
    "api_start": True,
    "api_stop": False,
}

THUMBNAIL_CHECK: TaskItemConfig = {
    "title": "Check your Thumbnails",
    "group": "setting:thumbnailcheck",
    "api_start": True,
    "api_stop": False,
}

RESYNC_THUMBS: TaskItemConfig = {
    "title": "Sync Thumbnails to Media Files",
    "group": "setting:thumbnailsync",
    "api_start": True,
    "api_stop": False,
}

INDEX_PLAYLISTS: TaskItemConfig = {
    "title": "Index Channel Playlist",
    "group": "channel:indexplaylist",
    "api_start": False,
    "api_stop": False,
}

SUBSCRIBE_TO: TaskItemConfig = {
    "title": "Add Subscription",
    "group": "subscription:add",
    "api_start": False,
    "api_stop": False,
}

VERSION_CHECK: TaskItemConfig = {
    "title": "Look for new Version",
    "group": "",
    "api_start": False,
    "api_stop": False,
}

# central registry: celery task name -> its static config item
TASK_CONFIG: dict[str, TaskItemConfig] = {
    "update_subscribed": UPDATE_SUBSCRIBED,
    "download_pending": DOWNLOAD_PENDING,
    "extract_download": EXTRACT_DOWNLOAD,
    "check_reindex": CHECK_REINDEX,
    "manual_import": MANUAL_IMPORT,
    "run_backup": RUN_BACKUP,
    "restore_backup": RESTORE_BACKUP,
    "rescan_filesystem": RESCAN_FILESYSTEM,
    "thumbnail_check": THUMBNAIL_CHECK,
    "resync_thumbs": RESYNC_THUMBS,
    "index_playlists": INDEX_PLAYLISTS,
    "subscribe_to": SUBSCRIBE_TO,
    "version_check": VERSION_CHECK,
}
|
@ -0,0 +1,142 @@
|
||||
"""
|
||||
Functionality:
|
||||
- read and write user config backed by ES
|
||||
- encapsulate persistence of user properties
|
||||
"""
|
||||
|
||||
from typing import TypedDict
|
||||
|
||||
from home.src.es.connect import ElasticWrap
|
||||
from home.src.ta.helper import get_stylesheets
|
||||
|
||||
|
||||
class UserConfigType(TypedDict, total=False):
    """describes the user configuration, all keys optional (total=False)"""

    stylesheet: str
    page_size: int
    sort_by: str
    sort_order: str
    # view styles are "grid" or "list" per page type
    view_style_home: str
    view_style_channel: str
    view_style_downloads: str
    view_style_playlist: str
    grid_items: int
    hide_watched: bool
    show_ignored_only: bool
    show_subed_only: bool
    sponsorblock_id: str
|
||||
|
||||
|
||||
class UserConfig:
    """Handle settings for an individual user.

    Config is read once from the ES document ta_config/_doc/user_<id>
    at construction time; keys the user never stored fall back to
    _DEFAULT_USER_SETTINGS.
    """

    # fallback values applied when the user has not stored a key
    # NOTE(review): sponsorblock_id=None contradicts its str annotation
    # in UserConfigType — kept as-is to preserve behavior; confirm
    _DEFAULT_USER_SETTINGS = UserConfigType(
        stylesheet="dark.css",
        page_size=12,
        sort_by="published",
        sort_order="desc",
        view_style_home="grid",
        view_style_channel="list",
        view_style_downloads="list",
        view_style_playlist="grid",
        grid_items=3,
        hide_watched=False,
        show_ignored_only=False,
        show_subed_only=False,
        sponsorblock_id=None,
    )

    VALID_STYLESHEETS = get_stylesheets()
    VALID_VIEW_STYLE = ["grid", "list"]
    VALID_SORT_ORDER = ["asc", "desc"]
    VALID_SORT_BY = [
        "published",
        "downloaded",
        "views",
        "likes",
        "duration",
        "filesize",
    ]
    VALID_GRID_ITEMS = range(3, 8)

    def __init__(self, user_id: str):
        self._user_id: str = user_id
        self._config: UserConfigType = self.get_config()

    def get_value(self, key: str):
        """Get the given key from the users configuration

        Throws a KeyError if the requested Key is not a permitted value"""
        if key not in self._DEFAULT_USER_SETTINGS:
            raise KeyError(f"Unable to read config for unknown key '{key}'")

        return self._config.get(key) or self._DEFAULT_USER_SETTINGS.get(key)

    def set_value(self, key: str, value: str | bool | int):
        """Set or replace a configuration value for the user

        Raises KeyError for unknown keys, ValueError for invalid values
        or when persisting to ES fails."""
        self._validate(key, value)
        old = self.get_value(key)
        self._config[key] = value

        # Upsert this property (creating a record if not exists)
        es_payload = {"doc": {"config": {key: value}}, "doc_as_upsert": True}
        es_document_path = f"ta_config/_update/user_{self._user_id}"
        response, status = ElasticWrap(es_document_path).post(es_payload)
        if status < 200 or status > 299:
            raise ValueError(f"Failed storing user value {status}: {response}")

        print(f"User {self._user_id} value '{key}' change: {old} -> {value}")

    def _validate(self, key, value):
        """validate key and value, raise KeyError/ValueError on failure"""
        if not self._user_id:
            raise ValueError("Unable to persist config for null user_id")

        if key not in self._DEFAULT_USER_SETTINGS:
            raise KeyError(
                f"Unable to persist config for an unknown key '{key}'"
            )

        valid_values = {
            "stylesheet": self.VALID_STYLESHEETS,
            "sort_by": self.VALID_SORT_BY,
            "sort_order": self.VALID_SORT_ORDER,
            "view_style_home": self.VALID_VIEW_STYLE,
            "view_style_channel": self.VALID_VIEW_STYLE,
            # BUGFIX: key was "view_style_download" (missing trailing "s"),
            # which never matched the real setting key and silently skipped
            # validation for view_style_downloads
            "view_style_downloads": self.VALID_VIEW_STYLE,
            "view_style_playlist": self.VALID_VIEW_STYLE,
            "grid_items": self.VALID_GRID_ITEMS,
            "page_size": int,
            "hide_watched": bool,
            "show_ignored_only": bool,
            "show_subed_only": bool,
        }
        # keys without an entry here (sponsorblock_id) accept any value
        validation_value = valid_values.get(key)

        if isinstance(validation_value, (list, range)):
            if value not in validation_value:
                raise ValueError(f"Invalid value for {key}: {value}")
        elif validation_value == int:
            if not isinstance(value, int):
                raise ValueError(f"Invalid value for {key}: {value}")
        elif validation_value == bool:
            if not isinstance(value, bool):
                raise ValueError(f"Invalid value for {key}: {value}")

    def get_config(self) -> UserConfigType:
        """get config from ES or load from the application defaults"""
        if not self._user_id:
            # this is for a non logged-in user so use all the defaults
            return {}

        # Does this user have configuration stored in ES
        es_document_path = f"ta_config/_doc/user_{self._user_id}"
        response, status = ElasticWrap(es_document_path).get(print_error=False)
        if status == 200 and "_source" in response.keys():
            source = response.get("_source")
            if "config" in source.keys():
                return source.get("config")

        # There is no config in ES
        return {}
|
@ -0,0 +1,8 @@
|
||||
from django import template
|
||||
|
||||
register = template.Library()


@register.filter(name="has_group")
def has_group(user, group_name):
    """template filter: True if the user belongs to the named auth group"""
    return user.groups.filter(name=group_name).exists()
|
@ -0,0 +1,11 @@
|
||||
"""test configs"""
|
||||
|
||||
import os
|
||||
|
||||
import pytest
|
||||
|
||||
|
||||
@pytest.fixture(scope="session", autouse=True)
def change_test_dir(request):
    """change working directory to the project folder for the whole
    test session so relative resource paths resolve"""
    os.chdir(request.config.rootdir / "tubearchivist")
|
@ -0,0 +1,113 @@
|
||||
"""tests for helper functions"""
|
||||
|
||||
import pytest
|
||||
from home.src.ta.helper import (
|
||||
date_parser,
|
||||
get_duration_str,
|
||||
get_mapping,
|
||||
is_shorts,
|
||||
randomizor,
|
||||
time_parser,
|
||||
)
|
||||
|
||||
|
||||
def test_randomizor_with_positive_length():
    """randomizor returns an alphanumeric string of the requested length"""
    length = 10
    result = randomizor(length)
    assert len(result) == length
    assert result.isalnum()


def test_date_parser_with_int():
    """unix timestamp converts to ISO date string"""
    timestamp = 1621539600
    expected_date = "2021-05-20"
    assert date_parser(timestamp) == expected_date


def test_date_parser_with_str():
    """iso timestamp string passes through unchanged"""
    date_str = "2021-05-21"
    expected_date = "2021-05-21"
    assert date_parser(date_str) == expected_date


def test_date_parser_with_invalid_input():
    """unsupported input type raises TypeError"""
    invalid_input = [1621539600]
    with pytest.raises(TypeError):
        date_parser(invalid_input)


def test_date_parser_with_invalid_string_format():
    """non-ISO date string raises ValueError"""
    invalid_date_str = "21/05/2021"
    with pytest.raises(ValueError):
        date_parser(invalid_date_str)
|
||||
|
||||
|
||||
def test_time_parser_with_numeric_string():
    """plain numeric string is taken as seconds"""
    timestamp = "100"
    expected_seconds = 100
    assert time_parser(timestamp) == expected_seconds


def test_time_parser_with_hh_mm_ss_format():
    """hh:mm:ss string converts to seconds"""
    timestamp = "01:00:00"
    expected_seconds = 3600.0
    assert time_parser(timestamp) == expected_seconds


def test_time_parser_with_empty_string():
    """empty input returns False instead of raising"""
    timestamp = ""
    assert time_parser(timestamp) is False


def test_time_parser_with_invalid_format():
    """two fields only: not enough values to unpack, raises ValueError"""
    timestamp = "01:00"
    with pytest.raises(ValueError):
        time_parser(timestamp)


def test_time_parser_with_non_numeric_input():
    """non numeric component raises ValueError"""
    timestamp = "1a:00:00"
    with pytest.raises(ValueError):
        time_parser(timestamp)
|
||||
|
||||
|
||||
def test_get_mapping():
    """test mapping: get_mapping returns a list of index config dicts"""
    index_config = get_mapping()
    assert isinstance(index_config, list)
    assert all(isinstance(i, dict) for i in index_config)


def test_is_shorts():
    """known shorts video id is detected"""
    youtube_id = "YG3-Pw3rixU"
    assert is_shorts(youtube_id)


def test_is_not_shorts():
    """regular video id is not flagged as shorts"""
    youtube_id = "Ogr9kbypSNg"
    assert is_shorts(youtube_id) is False


def test_get_duration_str():
    """seconds render as human readable duration string"""
    assert get_duration_str(None) == "NA"
    assert get_duration_str(5) == "5s"
    assert get_duration_str(10) == "10s"
    assert get_duration_str(500) == "8m 20s"
    assert get_duration_str(1000) == "16m 40s"
    assert get_duration_str(5000) == "1h 23m 20s"
    assert get_duration_str(500000) == "5d 18h 53m 20s"
    assert get_duration_str(5000000) == "57d 20h 53m 20s"
    assert get_duration_str(50000000) == "1y 213d 16h 53m 20s"
|
Some files were not shown because too many files have changed in this diff Show More
Loading…
Reference in New Issue