commit 7523a19d1f2bcf6afa4e54f4e726ec7601189edd
Author: Daniel Seiller
Date: Sun Aug 29 15:03:28 2021 +0200
Initial commit
diff --git a/.gitignore b/.gitignore
new file mode 100644
index 0000000..110ba6d
--- /dev/null
+++ b/.gitignore
@@ -0,0 +1,149 @@
+# Ignore dynaconf secret files
+.secrets.*
+
+# ---> Python
+# Byte-compiled / optimized / DLL files
+__pycache__/
+*.py[cod]
+*$py.class
+
+# C extensions
+*.so
+
+# Distribution / packaging
+.Python
+build/
+develop-eggs/
+dist/
+downloads/
+eggs/
+.eggs/
+lib/
+lib64/
+parts/
+sdist/
+var/
+wheels/
+share/python-wheels/
+*.egg-info/
+.installed.cfg
+*.egg
+MANIFEST
+
+# PyInstaller
+# Usually these files are written by a python script from a template
+# before PyInstaller builds the exe, so as to inject date/other infos into it.
+*.manifest
+*.spec
+
+# Installer logs
+pip-log.txt
+pip-delete-this-directory.txt
+
+# Unit test / coverage reports
+htmlcov/
+.tox/
+.nox/
+.coverage
+.coverage.*
+.cache
+nosetests.xml
+coverage.xml
+*.cover
+*.py,cover
+.hypothesis/
+.pytest_cache/
+cover/
+
+# Translations
+*.mo
+*.pot
+
+# Django stuff:
+*.log
+local_settings.py
+db.sqlite3
+db.sqlite3-journal
+
+# Flask stuff:
+instance/
+.webassets-cache
+
+# Scrapy stuff:
+.scrapy
+
+# Sphinx documentation
+docs/_build/
+
+# PyBuilder
+.pybuilder/
+target/
+
+# Jupyter Notebook
+.ipynb_checkpoints
+
+# IPython
+profile_default/
+ipython_config.py
+
+# pyenv
+# For a library or package, you might want to ignore these files since the code is
+# intended to run in multiple environments; otherwise, check them in:
+# .python-version
+
+# pipenv
+# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
+# However, in case of collaboration, if having platform-specific dependencies or dependencies
+# having no cross-platform support, pipenv may install dependencies that don't work, or not
+# install all needed dependencies.
+#Pipfile.lock
+
+# PEP 582; used by e.g. github.com/David-OConnor/pyflow
+__pypackages__/
+
+# Celery stuff
+celerybeat-schedule
+celerybeat.pid
+
+# SageMath parsed files
+*.sage.py
+
+# Environments
+.env
+.venv
+env/
+venv/
+ENV/
+env.bak/
+venv.bak/
+
+# Spyder project settings
+.spyderproject
+.spyproject
+
+# Rope project settings
+.ropeproject
+
+# mkdocs documentation
+/site
+
+# mypy
+.mypy_cache/
+.dmypy.json
+dmypy.json
+
+# Pyre type checker
+.pyre/
+
+# pytype static type analyzer
+.pytype/
+
+# Cython debug symbols
+cython_debug/
+
+stats.json
+stats_temp.json
+config.json
+Mediadash.db
+.history
+.vscode
\ No newline at end of file
diff --git a/README.md b/README.md
new file mode 100644
index 0000000..ae2a5c4
--- /dev/null
+++ b/README.md
@@ -0,0 +1,3 @@
+# Media Server Dashboard
+
+WIP
\ No newline at end of file
diff --git a/TODO.md b/TODO.md
new file mode 100644
index 0000000..609a688
--- /dev/null
+++ b/TODO.md
@@ -0,0 +1,8 @@
+- Jellyfin integration (?)
+- Webhooks for transcode queue
+- Webhook event log
+- Database models
+- Container details
+- Transcode Job queue
+- Transcode profile editor
+- DB Models
\ No newline at end of file
diff --git a/api.py b/api.py
new file mode 100644
index 0000000..356b497
--- /dev/null
+++ b/api.py
@@ -0,0 +1,649 @@
+import requests as RQ
+from requests.auth import HTTPBasicAuth
+from urllib.parse import urljoin, urlparse
+from fabric import Connection
+import time
+import json
+import base64
+import io
+from datetime import datetime,timedelta
+from sshpubkeys import AuthorizedKeysFile
+from utils import genpw,handle_config
+from pprint import pprint
+
+"""NOTES
+http://192.168.2.25:8080/sonarr/api/v3/release?seriesId=158&seasonNumber=8
+http://192.168.2.25:8080/sonarr/api/v3/release?episodeId=12272
+http://192.168.2.25:8080/radarr/api/v3/release?movieId=567
+
+http://192.168.2.25:9000/api/endpoints/1/docker/containers/json?all=1&filters=%7B%22label%22:%5B%22com.docker.compose.project%3Dtvstack%22%5D%7D
+"""
+
+
class Api(object):
    """Thin base wrapper around a requests.Session rooted at *url*.

    Extra keyword arguments become instance attributes; if a ``login``
    attribute ends up defined (e.g. by a subclass), it is invoked once
    at construction time.
    """

    def __init__(self, url, **kwargs):
        self.url = url
        self.session = RQ.Session()
        for attr_name, attr_value in kwargs.items():
            setattr(self, attr_name, attr_value)
        if hasattr(self, "login"):
            self.login()

    def get(self, endpoint, **kwargs):
        """GET *endpoint* relative to the base url; raises on HTTP errors."""
        response = self.session.get(urljoin(self.url, endpoint), **kwargs)
        response.raise_for_status()
        return response

    def post(self, endpoint, **kwargs):
        """POST to *endpoint* relative to the base url (no status check)."""
        return self.session.post(urljoin(self.url, endpoint), **kwargs)
+
+
class Portainer(object):
    """Client for the Portainer HTTP API, scoped to Docker endpoint #1 and
    the 'tvstack' compose project."""

    def __init__(self, url, username, passwd):
        self.url = url
        self.session = RQ.Session()
        res = self.session.post(
            urljoin(self.url, "api/auth"),
            # BUG FIX: username and passwd were swapped in the auth payload.
            json={"username": username, "password": passwd},
        )
        res.raise_for_status()  # fail fast on bad credentials
        jwt = res.json()
        self.session.headers.update({"Authorization": "Bearer {0[jwt]}".format(jwt)})

    def containers(self, container_id=None):
        """List containers of the 'tvstack' project, or inspect a single one.

        When *container_id* is given, the inspect payload is augmented with
        live ``stats`` and ``top`` data.
        """
        if container_id is None:
            res = self.session.get(
                urljoin(self.url, "api/endpoints/1/docker/containers/json"),
                params={
                    "all": 1,
                    "filters": json.dumps(
                        {"label": ["com.docker.compose.project=tvstack"]}
                    ),
                },
            )
        else:
            res = self.session.get(
                urljoin(
                    self.url,
                    "api/endpoints/1/docker/containers/{}/json".format(container_id),
                )
            )
        res.raise_for_status()
        res = res.json()
        if container_id is not None:
            res["stats"] = self.stats(container_id)
            res["top"] = self.top(container_id)
        return res

    def top(self, container_id):
        """Return the raw `docker top` payload for *container_id*."""
        res = self.session.get(
            urljoin(
                self.url,
                "api/endpoints/1/docker/containers/{}/top".format(container_id),
            )
        )
        res.raise_for_status()
        # Payload is {"Titles": [...], "Processes": [...]} — returned as-is.
        return res.json()

    def stats(self, container_id):
        """Return a single (non-streaming) stats sample for *container_id*."""
        res = self.session.get(
            urljoin(
                self.url,
                "api/endpoints/1/docker/containers/{}/stats".format(container_id),
            ),
            params={"stream": False},
        )
        res.raise_for_status()
        return res.json()

    def test(self):
        """Connectivity check used by Client.test(); empty dict means OK."""
        self.containers()
        return {}
+
+
class Jellyfin(object):
    """Client for the Jellyfin media-server API (X-Emby-Token auth)."""

    def __init__(self, url, api_key):
        self.url = url
        self.session = RQ.Session()
        self.session.headers.update({"X-Emby-Token": api_key})
        self.api_key = api_key
        self.user_id = self.get_self()['Id']
        # Remote-control command whitelists accepted by the Sessions API.
        self.playstate_commands = sorted([
            "Stop", "Pause", "Unpause", "NextTrack", "PreviousTrack", "Seek", "Rewind", "FastForward", "PlayPause"
        ])
        self.session_commands = sorted([
            "MoveUp", "MoveDown", "MoveLeft", "MoveRight", "PageUp", "PageDown", "PreviousLetter", "NextLetter", "ToggleOsd", "ToggleContextMenu", "Select", "Back", "TakeScreenshot", "SendKey", "SendString", "GoHome", "GoToSettings", "VolumeUp", "VolumeDown", "Mute", "Unmute", "ToggleMute", "SetVolume", "SetAudioStreamIndex", "SetSubtitleStreamIndex", "ToggleFullscreen", "DisplayContent", "GoToSearch", "DisplayMessage", "SetRepeatMode", "ChannelUp", "ChannelDown", "Guide", "ToggleStats", "PlayMediaSource", "PlayTrailers", "SetShuffleQueue", "PlayState", "PlayNext", "ToggleOsdMenu", "Play"
        ])

    def _endpoint(self, *parts):
        """Join path *parts* onto the base url.

        BUG FIX helper: urljoin() accepts only a single relative path — the
        old code passed extra segments positionally, which hit urljoin's
        ``allow_fragments`` parameter and raised TypeError.
        """
        return urljoin(self.url, "/".join(str(p) for p in parts))

    def status(self):
        """Return System/Info (server identity and state)."""
        res = self.session.get(urljoin(self.url, "System/Info"))
        res.raise_for_status()
        return res.json()

    def chapter_image_url(self, item_id, chapter_num, tag):
        """Build the chapter-image URL for *item_id*/*chapter_num*.

        BUG FIX: this used to call an undefined name (itself) — NameError —
        and misused urljoin. ``tag`` is appended for cache busting.
        """
        url = self._endpoint("Items", item_id, "Images", "Chapter", chapter_num)
        return "{}?tag={}".format(url, tag) if tag else url

    def rq(self, method, url, *args, **kwargs):
        """Issue an arbitrary-method request against the API; raises on errors."""
        res = self.session.request(method, urljoin(self.url, url), *args, **kwargs)
        res.raise_for_status()
        return res

    def get(self, url, *args, **kwargs):
        """GET a relative API path; raises on HTTP errors."""
        res = self.session.get(urljoin(self.url, url), *args, **kwargs)
        res.raise_for_status()
        return res

    def post(self, url, *args, **kwargs):
        """POST to a relative API path; raises on HTTP errors."""
        res = self.session.post(urljoin(self.url, url), *args, **kwargs)
        res.raise_for_status()
        return res

    def sessions(self):
        """Return the list of active playback sessions."""
        res = self.session.get(urljoin(self.url, "Sessions"))
        res.raise_for_status()
        return res.json()

    def media_info(self, item_id):
        """Return item metadata as seen by the authenticated user."""
        # BUG FIX: urljoin was given multiple path segments (TypeError).
        res = self.session.get(self._endpoint("Users", self.user_id, "Items", item_id))
        res.raise_for_status()
        return res.json()

    def system_info(self):
        """Alias of status(): return System/Info."""
        res = self.session.get(urljoin(self.url, "System/Info"))
        res.raise_for_status()
        return res.json()

    def __get_child_items(self, item_id):
        """Return the direct children of *item_id* for the current user."""
        # BUG FIX: urljoin misuse, plus a debug print removed.
        res = self.session.get(
            self._endpoint("Users", self.user_id, "Items"),
            params={"ParentId": item_id},
        )
        res.raise_for_status()
        return res.json()

    def get_recursive(self, item_id):
        """Yield every descendant item of *item_id*, depth-first."""
        for item in self.__get_child_items(item_id).get("Items", []):
            yield item
            yield from self.get_recursive(item["Id"])

    def get_library(self):
        """Walk every media folder (currently discards the items)."""
        res = self.session.get(urljoin(self.url, "Library/MediaFolders"))
        res.raise_for_status()
        for folder in res.json().get("Items", []):
            for item in self.get_recursive(folder["Id"]):
                pass

    def __db_fetch(self, endpoint):
        """Fetch a Reports/* table and re-shape rows into dicts keyed by the
        lower-cased column headers; each row also carries its 'row_type'."""
        ret = []
        res = self.session.get(
            urljoin(self.url, endpoint),
            params={"StartIndex": 0, "IncludeItemTypes": "*", "ReportColumns": ""},
        )
        res.raise_for_status()
        res = res.json()
        headers = [h["Name"].lower() for h in res["Headers"]]
        for row in res["Rows"]:
            fields = [c["Name"] for c in row["Columns"]]
            ret.append(dict(zip(headers, fields)))
            ret[-1]["row_type"] = row["RowType"]
        return ret

    def get_self(self):
        """Return the user object for the authenticated token.

        BUG FIX: was ``urljoin(self.url, "users", "me")`` (TypeError) followed
        by ``[0]``. Users/Me returns the user object directly — TODO confirm
        against the deployed server version.
        """
        res = self.session.get(urljoin(self.url, "Users/Me"))
        res.raise_for_status()
        return res.json()

    def get_users(self):
        """Return all user accounts."""
        res = self.session.get(urljoin(self.url, "users"))
        res.raise_for_status()
        return res.json()

    def activity(self):
        """Return the activity report table."""
        return self.__db_fetch("Reports/Activities")

    def report(self):
        """Return the items report table."""
        return self.__db_fetch("Reports/Items")

    def stop_session(self, session_id):
        """Stop playback for *session_id*: kill any active transcode, mark the
        item as no longer playing, then send the Stop playstate command."""
        sessions = self.get("Sessions").json()
        for session in sessions:
            if session['Id'] == session_id and "NowPlayingItem" in session:
                s_id = session["Id"]
                u_id = session["UserId"]
                i_id = session['NowPlayingItem']['Id']
                d_id = session['DeviceId']
                self.rq("delete", "Videos/ActiveEncodings", params={"deviceId": d_id, "playSessionId": s_id})
                self.rq("delete", f"Users/{u_id}/PlayingItems/{i_id}")
                self.rq("post", f"Sessions/{s_id}/Playing/Stop")

    def test(self):
        """Connectivity check used by Client.test(); empty dict means OK."""
        self.status()
        return {}
+
+
class QBittorrent(object):
    """Client for the qBittorrent WebUI API v2 (cookie-session auth)."""

    # torrent state -> (human label, bootstrap context class)
    status_map = {
        "downloading": ("Downloading", "primary"),
        "uploading": ("Seeding", "success"),
        "forcedDL": ("Downloading [Forced]", "primary"),
        "forcedUP": ("Seeding [Forced]", "success"),
        "pausedDL": ("Downloading [Paused]", "secondary"),
        "pausedUP": ("Seeding [Paused]", "secondary"),
        "stalledDL": ("Downloading [Stalled]", "warning"),
        "stalledUP": ("Seeding [Stalled]", "warning"),
        "metaDL": ("Downloading metadata", "primary"),
        "error": ("Error", "danger"),
        "missingFiles": ("Missing Files", "danger"),
        "queuedUP": ("Seeding [Queued]", "info"),
        "queuedDL": ("Downloading [Queued]", "info"),
    }

    # tracker status code -> (human label, bootstrap context class)
    tracker_status = {
        0: ("Disabled", "secondary"),
        1: ("Not contacted", "info"),
        2: ("Working", "success"),
        3: ("Updating", "warning"),
        4: ("Not working", "danger"),
    }

    def __init__(self, url, username, passwd):
        self.url = url
        self.username = username
        self.passwd = passwd
        # rid is the sync endpoint's incremental "response id" cursor.
        self.rid = int(time.time())
        self.session = RQ.Session()
        url = urljoin(self.url, "/api/v2/auth/login")
        self.session.post(
            url, data={"username": self.username, "password": self.passwd}
        ).raise_for_status()

    def get(self, url, **kwargs):
        """GET an API path; returns parsed JSON, or raw text if not JSON."""
        kwargs["rid"] = self.rid
        url = urljoin(self.url, url)
        res = self.session.get(url, params=kwargs)
        res.raise_for_status()
        try:
            return res.json()
        except ValueError:
            return res.text

    def add(self, **kwargs):
        """Add torrents (urls/category/... forwarded as form fields).

        Returns (response text, HTTP status code).
        """
        self.rid += 1
        url = urljoin(self.url, "/api/v2/torrents/add")
        ret = self.session.post(url, data=kwargs)
        return ret.text, ret.status_code

    def add_trackers(self, infohash, trackers=None):
        """Append public trackers (plus any given) to torrent *infohash*.

        Tracker lists are fetched best-effort; a failed fetch is logged and
        skipped rather than aborting.
        """
        if trackers is None:
            trackers = []
        for tracker_list in [
            "https://newtrackon.com/api/live",
            "https://ngosang.github.io/trackerslist/trackers_best.txt",
        ]:
            try:
                trackers_res = RQ.get(tracker_list)
                trackers_res.raise_for_status()
            except Exception as e:
                print("Error getting tracker list:", e)
                continue
            trackers += trackers_res.text.split()
        url = urljoin(self.url, "/api/v2/torrents/addTrackers")
        data = {"hash": infohash, "urls": "\n\n".join(trackers)}
        ret = self.session.post(url, data=data)
        ret.raise_for_status()
        return ret.text

    def poll(self, infohash=None):
        """Without *infohash*: return sync/maindata enriched with per-torrent
        speed and ETA estimates. With one: return detailed info, properties,
        trackers, webseeds and files for that torrent."""
        if infohash:
            ret = {}
            res = self.get("/api/v2/torrents/info", hashes=infohash)
            ret["info"] = res
            for endpoint in ["properties", "trackers", "webseeds", "files"]:
                url = "/api/v2/torrents/{}".format(endpoint)
                res = self.get("/api/v2/torrents/{}".format(endpoint), hash=infohash)
                if endpoint == "trackers":
                    # Normalize tracker rows for template rendering: map the
                    # numeric status to (label, css class) and render negative
                    # peer counts (qbt's "unknown") as "?".
                    for v in res:
                        if v["tier"] == "":
                            v["tier"] = -1
                        v["status"] = self.tracker_status.get(
                            v["status"], ("Unknown", "light")
                        )
                        v["total_peers"] = (
                            v["num_seeds"] + v["num_leeches"] + v["num_peers"]
                        )
                        for k in [
                            "num_seeds",
                            "num_leeches",
                            "total_peers",
                            "num_downloaded",
                            "num_peers",
                        ]:
                            if v[k] < 0:
                                v[k] = (-1, "?")
                            else:
                                v[k] = (v[k], v[k])
                ret[endpoint] = res
            # NOTE(review): assumes the hash matched exactly one torrent.
            ret["info"] = ret["info"][0]
            ret["info"]["state"] = self.status_map.get(
                ret["info"]["state"], (ret["info"]["state"], "light")
            )
            for tracker in ret["trackers"]:
                tracker["name"] = urlparse(tracker["url"]).netloc or tracker["url"]
                tracker["has_url"] = bool(urlparse(tracker["url"]).netloc)
            return ret
        res = self.get("/api/v2/sync/maindata")
        if "torrents" in res:
            for k, v in res["torrents"].items():
                v["hash"] = k
                v["speed"] = v["upspeed"] + v["dlspeed"]
                # ETA based on wall-clock time since the torrent was added.
                # BUG FIX: the old max(0, ...) guard allowed a
                # ZeroDivisionError when added_on is the current second.
                elapsed = time.time() - v["added_on"]
                dl_rate = v["downloaded"] / elapsed if elapsed > 0 else 0
                if dl_rate > 0:
                    v["eta"] = max(0, (v["size"] - v["downloaded"]) / dl_rate)
                else:
                    v["eta"] = 0
                # ETA based on active (non-paused) time only.
                if v["time_active"] == 0:
                    dl_rate = 0
                else:
                    dl_rate = v["downloaded"] / v["time_active"]
                if dl_rate > 0:
                    v["eta_act"] = max(0, (v["size"] - v["downloaded"]) / dl_rate)
                else:
                    v["eta_act"] = 0
                res["torrents"][k] = v
        res["version"] = self.get("/api/v2/app/version")
        # Advance the sync cursor so the next poll returns deltas.
        self.rid = res["rid"]
        return res

    def status(self, infohash=None):
        """Bump the sync cursor and poll (all torrents, or one by hash)."""
        self.rid += 1
        return self.poll(infohash)

    def peer_log(self, limit=0):
        """Return the last *limit* peer-log entries (0 = all)."""
        return self.get("/api/v2/log/peers")[-limit:]

    def log(self, limit=0):
        """Return the last *limit* main-log entries (0 = all)."""
        return self.get("/api/v2/log/main")[-limit:]

    def test(self):
        """Connectivity check used by Client.test(); empty dict means OK."""
        self.poll()
        return {}
+
+
class Radarr(object):
    """Client for the Radarr v3 API (api-key passed as a query parameter)."""

    def __init__(self, url, api_key):
        self.url = url
        self.api_key = api_key

    def get(self, url, **kwargs):
        """GET *url* with api key + cache-buster; JSON if possible, else text."""
        kwargs["apikey"] = self.api_key
        kwargs["_"] = str(int(time.time()))
        res = RQ.get(urljoin(self.url, url), params=kwargs)
        res.raise_for_status()
        try:
            return res.json()
        except ValueError:  # narrowed from a bare except
            return res.text

    def search(self, query):
        """Movie lookup by free-text *query*."""
        return self.get("api/v3/movie/lookup", term=query)

    def status(self):
        """Return system status."""
        return self.get("api/v3/system/status")

    def history(self, pageSize=500):
        """Return the newest *pageSize* history entries."""
        return self.get(
            "api/v3/history",
            page=1,
            # BUG FIX: the pageSize argument was ignored (hard-coded 500).
            pageSize=pageSize,
            sortDirection="descending",
            sortKey="date",
        )

    def calendar(self, days=30):
        """Return calendar entries within +/- *days* of today."""
        today = datetime.today()
        start = today - timedelta(days=days)
        end = today + timedelta(days=days)
        return self.get(
            "api/v3/calendar",
            unmonitored=False,
            start=start.isoformat(),
            end=end.isoformat(),
        )

    def movies(self):
        """Return all movies."""
        return self.get("api/v3/movie")

    def queue(self, series_id=None):
        """Return the download queue.

        *series_id* is accepted for signature compatibility but unused —
        the endpoint always returns the whole queue.
        """
        return self.get("api/v3/queue")

    def log(self, limit=0):
        """Return the newest log entries (up to *limit*, default 1024)."""
        return self.get(
            "api/v3/log",
            page=1,
            pageSize=(limit or 1024),
            sortDirection="descending",
            sortKey="time",
        )

    def test(self):
        """Connectivity check used by Client.test(); empty dict means OK."""
        self.status()
        return {}
+
+
class Sonarr(object):
    """Client for the Sonarr v3 API (api-key passed as a query parameter)."""

    def __init__(self, url, api_key):
        self.url = url
        self.api_key = api_key

    def get(self, url, **kwargs):
        """GET *url* with api key + cache-buster; JSON if possible, else text."""
        kwargs["apikey"] = self.api_key
        kwargs["_"] = str(int(time.time()))
        res = RQ.get(urljoin(self.url, url), params=kwargs)
        res.raise_for_status()
        try:
            return res.json()
        except ValueError:  # narrowed from a bare except
            return res.text

    def search(self, query):
        """Series lookup by free-text *query*."""
        return self.get("api/v3/series/lookup", term=query)

    def status(self):
        """Return system status."""
        return self.get("api/v3/system/status")

    def history(self, pageSize=500):
        """Return the newest *pageSize* history entries."""
        return self.get(
            "api/v3/history",
            page=1,
            # BUG FIX: the pageSize argument was ignored (hard-coded 500).
            pageSize=pageSize,
            sortDirection="descending",
            sortKey="date",
        )

    def calendar(self, days=30):
        """Return calendar entries within +/- *days* of today."""
        today = datetime.today()
        start = today - timedelta(days=days)
        end = today + timedelta(days=days)
        return self.get(
            "api/v3/calendar",
            unmonitored=False,
            start=start.isoformat(),
            end=end.isoformat(),
        )

    def series(self, series_id=None):
        """Return all series, or episodes/files/queue for one *series_id*."""
        if series_id is None:
            return self.get("api/v3/series")
        ret = {}
        ret["episodes"] = self.get("api/v3/episode", seriesId=series_id)
        ret["episodeFile"] = self.get("api/v3/episodeFile", seriesId=series_id)
        ret["queue"] = self.get("api/v3/queue/details", seriesId=series_id)
        return ret

    def queue(self, series_id=None):
        """Return the download queue.

        *series_id* is accepted for signature compatibility but unused —
        the endpoint always returns the whole queue.
        """
        return self.get("api/v3/queue")

    def episodes(self, series_id):
        """Return all episodes of *series_id*."""
        return self.get("api/v3/episode", seriesId=series_id)

    def log(self, limit=0):
        """Return the newest log entries (up to *limit*, default 1024)."""
        return self.get(
            "api/v3/log",
            page=1,
            pageSize=(limit or 1024),
            sortDirection="descending",
            sortKey="time",
        )

    def test(self):
        """Connectivity check used by Client.test(); empty dict means OK."""
        self.status()
        return {}
+
+
class Jackett(object):
    """Client for the Jackett torrent-indexer aggregation API."""

    def __init__(self, url, api_key):
        self.url = url
        self.api_key = api_key
        self.session = RQ.Session()
        # Warm-up/cookie request.
        # BUG FIX: this used a hard-coded LAN address
        # (http://192.168.2.25:9117/...) instead of the configured base url.
        self.session.post(urljoin(self.url, "UI/Dashboard"))

    def search(self, query, indexers=None):
        """Search all (or the given) indexers for *query*.

        Numeric result fields that Jackett omits are normalized to 0 so
        templates can sort on them.
        """
        params = {"apikey": self.api_key, "Query": query, "_": str(int(time.time()))}
        if indexers:
            params["Tracker[]"] = indexers
        res = self.session.get(
            urljoin(self.url, "api/v2.0/indexers/all/results"), params=params
        )
        res.raise_for_status()
        res = res.json()
        for val in res["Results"]:
            for prop in ["Gain", "Seeders", "Peers", "Grabs", "Files"]:
                val[prop] = val.get(prop) or 0
        return res

    def indexers(self):
        """Return (id, name) pairs of every configured indexer."""
        return [
            (t["id"], t["name"])
            for t in self.session.get(urljoin(self.url, "api/v2.0/indexers")).json()
            if t.get("configured")
        ]

    def test(self):
        """Run Jackett's per-indexer self-test; returns {name: error} for
        indexers that reported a failure."""
        errors = {}
        for idx, name in self.indexers():
            print("Testing indexer", name)
            result = self.session.post(
                urljoin(self.url, "api/v2.0/indexers/{}/test".format(idx))
            )
            if result.text:
                errors[name] = result.json()["error"]
        return errors
+
class Client(object):
    """Facade bundling all service clients plus an SSH connection to the box."""

    def __init__(self, cfg):
        self.cfg = cfg
        self.jackett = Jackett(cfg["jackett_url"], cfg["jackett_api_key"])
        self.sonarr = Sonarr(cfg["sonarr_url"], cfg["sonarr_api_key"])
        self.radarr = Radarr(cfg["radarr_url"], cfg["radarr_api_key"])
        self.jellyfin = Jellyfin(
            cfg["jellyfin_url"], cfg["jellyfin_api_key"]
        )
        self.qbittorent = QBittorrent(
            cfg["qbt_url"], cfg["qbt_username"], cfg["qbt_passwd"]
        )
        self.portainer = Portainer(
            cfg["portainer_url"], cfg["portainer_username"], cfg["portainer_passwd"]
        )
        # NOTE(review): hard-coded SSH target — consider making configurable.
        self.ssh = Connection('root@server')

    def _get_ssh_keys(self):
        """Return (key_type, key, comment) tuples from the server's
        authorized_keys file; comment lines and blanks are skipped."""
        res = self.ssh.get("/data/.ssh/authorized_keys", io.BytesIO())
        res.local.seek(0)
        ret = []
        for line in str(res.local.read(), "utf8").splitlines():
            if not line.strip() or line.startswith("#"):
                continue
            # NOTE(review): assumes each entry has at least a type and a key.
            key_type, key, comment = line.split(None, 2)
            ret.append((key_type, key, comment))
        return ret

    def add_user(self, name, ssh_key):
        """Create Jellyfin user *name*, install *ssh_key* on the server, and
        return the generated password."""
        cfg = handle_config()
        user_config = cfg['jellyfin_user_config']
        user_policy = cfg['jellyfin_user_policy']
        passwd = genpw()
        res = self.ssh.get("/data/.ssh/authorized_keys", io.BytesIO())
        res.local.seek(0)
        keys = [l.split(None, 2) for l in str(res.local.read(), "utf8").splitlines()]
        key_type, key, *_ = ssh_key.split()
        keys.append([key_type, key, name])
        # De-duplicate by key material, keeping the first occurrence.
        new_keys = []
        seen_keys = set()
        # BUG FIX: the loop variable used to shadow the *name* parameter,
        # corrupting the Jellyfin user name created below whenever the new
        # key was a duplicate.
        for entry_type, entry_key, entry_name in keys:
            if entry_key not in seen_keys:
                seen_keys.add(entry_key)
                new_keys.append([entry_type, entry_key, entry_name])
        new_keys_file = "\n".join(" ".join(entry) for entry in new_keys)
        self.ssh.put(io.BytesIO(bytes(new_keys_file, "utf8")), "/data/.ssh/authorized_keys", preserve_mode=False)
        user = self.jellyfin.post("Users/New", json={"Name": name, "Password": passwd})
        user.raise_for_status()
        user = user.json()
        self.jellyfin.post("Users/{Id}/Configuration".format(**user), json=user_config).raise_for_status()
        self.jellyfin.post("Users/{Id}/Policy".format(**user), json=user_policy).raise_for_status()
        return passwd

    @staticmethod
    def test(cfg=None):
        """Instantiate each client from *cfg* and run its self-test.

        Returns {"success": bool, "errors": {module: errors-or-message}}.
        """
        # BUG FIX: this is a staticmethod, so the old `cfg or self.cfg`
        # fallback raised NameError whenever cfg was None.
        if cfg is None:
            raise ValueError("Client.test requires a config mapping")
        modules = [
            (
                "Jackett",
                lambda cfg: Jackett(cfg["jackett_url"], cfg["jackett_api_key"]),
            ),
            ("Sonarr", lambda cfg: Sonarr(cfg["sonarr_url"], cfg["sonarr_api_key"])),
            ("Radarr", lambda cfg: Radarr(cfg["radarr_url"], cfg["radarr_api_key"])),
            (
                "QBittorrent",
                lambda cfg: QBittorrent(
                    cfg["qbt_url"], cfg["qbt_username"], cfg["qbt_passwd"]
                ),
            ),
            (
                "Jellyfin",
                # BUG FIX: Jellyfin takes (url, api_key); it was handed
                # username/password keys that don't exist in the config.
                lambda cfg: Jellyfin(cfg["jellyfin_url"], cfg["jellyfin_api_key"]),
            ),
            (
                "Portainer",
                lambda cfg: Portainer(
                    cfg["portainer_url"],
                    cfg["portainer_username"],
                    cfg["portainer_passwd"],
                ),
            ),
        ]
        errors = {}
        success = True
        for mod, factory in modules:
            try:
                print("Testing", mod)
                errors[mod] = factory(cfg).test()
                if errors[mod]:
                    success = False
            except Exception as e:
                errors[mod] = str(e)
                success = False
        print(errors)
        return {"success": success, "errors": errors}
diff --git a/app.py b/app.py
new file mode 100644
index 0000000..92ad3ca
--- /dev/null
+++ b/app.py
@@ -0,0 +1,586 @@
+import sys
+from gevent import monkey
+if not "--debug" in sys.argv[1:]:
+ monkey.patch_all()
+import os
+import requests as RQ
+import json
+import re
+import io
+import hashlib
+import base64
+import time
+import threading
+from webargs import fields
+from webargs.flaskparser import use_args
+from datetime import timedelta, datetime
+from pprint import pprint
+from urllib.parse import quote, urljoin, unquote_plus
+import pylab as PL
+from matplotlib.ticker import EngFormatter
+from base64 import b64encode
+from slugify import slugify
+from cryptography.hazmat.primitives.serialization import load_ssh_public_key
+from flask import (
+ Flask,
+ render_template,
+ send_from_directory,
+ request,
+ send_file,
+ redirect,
+ flash,
+ url_for,
+ session,
+ jsonify,
+ Markup
+)
+from flask_nav import Nav, register_renderer
+from flask_nav.elements import Navbar, View, Subgroup
+from flask_bootstrap import Bootstrap
+from flask_wtf.csrf import CSRFProtect
+from flask_debugtoolbar import DebugToolbarExtension
+from flask_sqlalchemy import SQLAlchemy
+
+# ===================
+import stats_collect
+from forms import ConfigForm, SearchForm, TranscodeProfileForm, AddSSHUser
+from api import Client
+from models import db, TranscodeJob, Stats
+from transcode import profiles
+from utils import (
+ BootsrapRenderer,
+ eval_expr,
+ make_tree,
+ make_placeholder_image,
+ with_application_context,
+ handle_config,
+ genpw
+)
+
+
def left_nav():
    """Assemble the navbar shown on every page."""
    entries = (
        ("Home", "index", {}),
        ("Containers", "containers", {"container_id": None}),
        ("qBittorrent", "qbittorrent", {"infohash": None}),
        ("Sonarr", "sonarr", {"id": None}),
        ("Radarr", "radarr", {"id": None}),
        ("Jellyfin", "jellyfin", {}),
        ("Search", "search", {}),
        ("History", "history", {}),
        ("Transcode", "transcode", {}),
        ("Config", "config", {}),
        ("Remote", "remote", {}),
        ("Log", "app_log", {}),
    )
    views = [View(label, endpoint, **kw) for label, endpoint, kw in entries]
    return Navbar("PirateDash", *views)
+
+
def create_app():
    """Build and configure the Flask application instance."""
    template_dir = os.path.join(os.path.dirname(os.path.abspath(__file__)), "templates")
    flask_app = Flask(__name__, template_folder=template_dir)
    flask_app.config.from_pyfile("config.cfg")
    # Extensions are stashed on the app object for later access.
    flask_app.bootstrap = Bootstrap(flask_app)
    flask_app.csrf = CSRFProtect(flask_app)
    flask_app.nav = Nav(flask_app)
    flask_app.toolbar = DebugToolbarExtension(flask_app)
    for extension in ("jinja2.ext.debug", "jinja2.ext.do"):
        flask_app.jinja_env.add_extension(extension)
    flask_app.jinja_env.trim_blocks = True
    flask_app.jinja_env.lstrip_blocks = True
    register_renderer(flask_app, "bootstrap4", BootsrapRenderer)
    flask_app.nav.register_element("left_nav", left_nav)
    db.init_app(flask_app)
    flask_app.db = db
    return flask_app


app = create_app()
+
+
@app.template_filter("hash")
def t_hash(s):
    """Jinja filter: SHA-512 hex digest of a UTF-8 encoded string."""
    return hashlib.sha512(s.encode("utf-8")).hexdigest()
+
+
@app.template_filter()
def regex_replace(s, find, replace):
    """Jinja filter: substitute every regex match of *find* in *s*."""
    pattern = re.compile(find)
    return pattern.sub(replace, s)
+
+
@app.template_filter("ctime")
def timectime(s):
    """Jinja filter: render a unix timestamp via time.ctime."""
    rendered = time.ctime(s)
    return rendered
+
+
@app.template_filter("ago")
def timeago(s, clamp=False):
    """Jinja filter: timedelta elapsed since unix timestamp *s*.

    With ``clamp``, negative (future) values render as zero.
    """
    elapsed = round(time.time() - s, 0)
    if clamp and elapsed < 0:
        elapsed = 0
    return timedelta(seconds=elapsed)
+
+
@app.template_filter("ago_dt")
def ago_dt(s, rnd=None):
    """Jinja filter: time elapsed since naive local datetime *s*.

    ``rnd`` optionally rounds total seconds to that many digits.
    """
    dt = datetime.today() - s
    if rnd is not None:
        secs = round(dt.total_seconds(), rnd)
        dt = timedelta(seconds=secs)
    out = str(dt)
    # BUG FIX: only strip trailing zeros from a fractional part; the old
    # blanket rstrip("0") turned e.g. "1:00:00" into "1:00:".
    return out.rstrip("0").rstrip(".") if "." in out else out
+
@app.template_filter("ago_dt_utc")
def ago_dt_utc(s, rnd=None):
    """Jinja filter: time elapsed since naive UTC datetime *s*.

    ``rnd`` optionally rounds total seconds to that many digits.
    """
    dt = datetime.utcnow() - s
    if rnd is not None:
        secs = round(dt.total_seconds(), rnd)
        dt = timedelta(seconds=secs)
    out = str(dt)
    # BUG FIX: only strip trailing zeros from a fractional part; the old
    # blanket rstrip("0") turned e.g. "1:00:00" into "1:00:".
    return out.rstrip("0").rstrip(".") if "." in out else out
+
@app.template_filter("ago_dt_utc_human")
def ago_dt_utc_human(s, swap=False, rnd=None):
    """Jinja filter: human-friendly distance between naive UTC *s* and now.

    Future times render as "In H:MM:SS", past times as "H:MM:SS ago".
    ``swap`` inverts the subtraction; ``rnd`` rounds total seconds.
    """
    if not swap:
        dt = datetime.utcnow() - s
    else:
        dt = s - datetime.utcnow()
    if rnd is not None:
        secs = round(dt.total_seconds(), rnd)
        dt = timedelta(seconds=secs)
    if dt.total_seconds() < 0:
        text, prefix, suffix = str(-dt), "In ", ""
    else:
        text, prefix, suffix = str(dt), "", " ago"
    # BUG FIX: only strip trailing zeros from a fractional part; the old
    # blanket rstrip("0") turned e.g. "1:00:00" into "1:00:".
    if "." in text:
        text = text.rstrip("0").rstrip(".")
    return prefix + text + suffix
+
@app.template_filter("timedelta")
def time_timedelta(s, digits=None, clamp=False):
    """Jinja filter: seconds -> timedelta, optionally clamped/rounded."""
    secs = max(s, 0) if clamp else s
    if digits is not None:
        secs = round(secs, digits)
    return timedelta(seconds=secs)
+
+
@app.template_filter("fromiso")
def time_fromiso(s):
    """Jinja filter: parse "YYYY-mm-ddTHH:MM:SS[.fraction][Z]" to datetime."""
    base, _, frac = s.rstrip("Z").partition(".")
    t = datetime.strptime(base, "%Y-%m-%dT%H:%M:%S")
    if frac.isdigit():
        # BUG FIX: datetime is immutable — the old code assigned to
        # t.microsecond (always AttributeError, hidden by a bare except, so
        # the fraction was silently dropped). Pad/truncate the fractional
        # digits to microsecond precision and use replace().
        t = t.replace(microsecond=int(frac.ljust(6, "0")[:6]))
    return t
+
+
# Expose urljoin directly to templates.
app.add_template_global(urljoin, "urljoin")

@app.template_filter("slugify")
def make_slug(s):
    """Jinja filter: return the slugified (ASCII-only) form of *s*."""
    return slugify(s, only_ascii=True)


# Register the make_tree helper as a filter and the config loader as a
# template global named "cfg".
app.template_filter()(make_tree)
app.add_template_global(handle_config, "cfg")
+
@app.before_request
def before_request():
    """Ensure DB tables exist and refresh the cached config before each request."""
    # NOTE(review): create_all() runs on *every* request; harmless after the
    # first call but wasteful — consider moving it to app startup.
    db.create_all()
    app.config["APP_CONFIG"] = handle_config()
+
+
# BUG FIX: the route was missing the <path:path> converter, so the view's
# `path` argument could never be supplied and every request failed.
@app.route("/static/<path:path>")
def send_static(path):
    """Serve a file from the ./static directory."""
    return send_from_directory("static", path)
+
+
def populate_form(form, cfg=None):
    """Seed form-field defaults (and profile choices) from the stored config."""
    cfg = handle_config() if cfg is None else cfg
    for field_name, field in form._fields.items():
        field.default = cfg.get(field_name)
    profile_names = (cfg.get("transcode_profiles", {}) or {}).keys()
    form.transcode_default_profile.choices = [(None, "")] + [
        (profile, profile) for profile in profile_names
    ]
+
+
def validate_transcoding_profiles(profiles):
    """Validate a mapping of transcode profiles.

    Every profile must provide a string "command" and a string "doc".
    Raises ValueError describing the first violation found.
    """
    required = (("command", str), ("doc", str))
    for profile_name, profile in profiles.items():
        for key, expected_type in required:
            if key not in profile:
                raise ValueError(
                    "Profile '{}' is missing required key '{}'".format(profile_name, key)
                )
            if not isinstance(profile[key], expected_type):
                raise ValueError(
                    "Key '{}' of profile '{}' should be of type '{}'".format(
                        key, profile_name, expected_type.__name__
                    )
                )
+
+
@app.route("/config", methods=["GET", "POST"])
def config():
    """Show and persist the application configuration form.

    POSTing with the "test" button runs connectivity checks instead of
    saving; an uploaded JSON file replaces the transcode profiles.
    """
    form = ConfigForm()
    cfg = {}
    populate_form(form)
    if form.validate_on_submit():
        # Buttons and the CSRF token are not config values.
        skip = ["save", "test", "csrf_token"]
        transcode_profiles = request.files.get("transcode_profiles")
        if transcode_profiles:
            try:
                # json.load raises ValueError (JSONDecodeError) on bad input.
                form.transcode_profiles.data = json.load(transcode_profiles)
                validate_transcoding_profiles(form.transcode_profiles.data)
            except ValueError as e:
                form.transcode_profiles.data = None
                form.transcode_profiles.errors = [
                    "Invalid json data in file {}: {}".format(
                        transcode_profiles.filename, e
                    )
                ]
        else:
            # No upload: carry over the previously stored profiles.
            form.transcode_profiles.data = app.config["APP_CONFIG"].get(
                "transcode_profiles", {}
            )
        if form.errors:
            return render_template("config.html", form=form)
        # Collect every non-button field into the new config mapping.
        for name, field in form._fields.items():
            if name in skip:
                continue
            cfg[name] = field.data
        if form.test.data:
            # "Test" pressed: run connectivity checks, do not persist.
            test_res = Client.test(cfg)
            populate_form(form, cfg)
            return render_template("config.html", form=form, test=test_res)
        handle_config(cfg)
        populate_form(form)
        return render_template("config.html", form=form)
    form.process()
    return render_template("config.html", form=form)
+
+
@app.route("/search/details", methods=["GET", "POST"])
def details():
    """Render the expanded-details partial for one search result."""
    info = json.loads(unquote_plus(request.form["data"]))
    return render_template(
        "search/details.html", info=info, type=request.form["type"]
    )
+
+
@app.route("/search", methods=["GET", "POST"])
def search():
    """Combined search across Jackett (torrents), Radarr (movies) and
    Sonarr (TV shows)."""
    cfg = handle_config()
    c = Client(cfg)
    results = {}
    params = request.args
    form = SearchForm()
    form.indexer.choices = c.jackett.indexers()
    if form.validate_on_submit():
        query = form.query.data
        # Nothing ticked means "search everything".
        if not (form.torrents.data or form.movies.data or form.tv_shows.data):
            form.torrents.data = True
            form.movies.data = True
            form.tv_shows.data = True

        if form.torrents.data:
            # NOTE(review): with no indexer selected this falls back to the
            # raw (id, name) choice tuples — verify Jackett accepts those.
            results["torrents"] = c.jackett.search(
                query, form.indexer.data or form.indexer.choices
            )
        if form.movies.data:
            results["movies"] = c.radarr.search(query)
        if form.tv_shows.data:
            results["tv_shows"] = c.sonarr.search(query)
        return render_template(
            "search/index.html",
            # form=form,
            search_term=query,
            results=results,
            client=c,
            group_by_tracker=form.group_by_tracker.data,
        )
    # GET (or invalid POST): pre-fill the form from the query string.
    for name, field in form._fields.items():
        field.default = params.get(name)
    form.process()
    return render_template(
        "search/index.html",
        form=form,
        results={},
        group_by_tracker=False,
        sort_by="Gain",
    )
+
+
@app.route("/api/add_torrent", methods=["POST"])
def add_torrent():
    """Add the submitted torrent/magnet URLs to qBittorrent.

    Polls the client for up to ~5s until the new hashes appear, then stashes
    them in the session so the search page can highlight them.
    """
    category = request.form.get("category")
    cfg = handle_config()
    c = Client(cfg)
    hashes_before = set(c.qbittorent.status().get("torrents", {}))
    torrent_links = request.form.getlist("torrent[]")
    count = len(torrent_links)
    c.qbittorent.add(urls="\n".join(torrent_links) + "\n", category=category)
    status = {}
    for _ in range(10):
        status = c.qbittorent.status().get("torrents", {})
        hashes_after = set(status)
        if len(hashes_after - hashes_before) == count:
            break
        time.sleep(0.5)
    else:
        # BUG FIX: the flash category was misspelled "waring", breaking the
        # bootstrap alert styling.
        flash("Some torrents failed to get added to QBittorrent", "warning")
    new_torrents = sorted(hashes_after - hashes_before)
    session["new_torrents"] = {h: status[h] for h in new_torrents}
    return redirect(url_for("search"))
+
+
@app.route("/history")
def history():
    """Combined grab history from Sonarr and Radarr."""
    client = Client(handle_config())
    return render_template(
        "history.html",
        sonarr=client.sonarr.history(),
        radarr=client.radarr.history(),
    )
+
+
@app.route("/sonarr", defaults={"show_id": None})
# BUG FIX: the second route lost its <show_id> converter, so the details
# variant could never match.
@app.route("/sonarr/<show_id>")
def sonarr(show_id):
    """Sonarr overview (no id) or the per-show details page."""
    cfg = handle_config()
    c = Client(cfg)
    if not show_id:
        series = c.sonarr.series()
        status = c.sonarr.status()
        # BUG FIX: the template was handed the global `history` *view
        # function* instead of Sonarr history data.
        history_data = c.sonarr.history()
        return render_template(
            "sonarr/index.html", series=series, status=status, history=history_data
        )
    return render_template("sonarr/details.html")
+
+
@app.route("/radarr", defaults={"movie_id": None})
# BUG FIX: the second route lost its <movie_id> converter, so the details
# variant could never match.
@app.route("/radarr/<movie_id>")
def radarr(movie_id):
    """Radarr overview (no id) or the per-movie details page."""
    cfg = handle_config()
    c = Client(cfg)
    if movie_id is None:
        movies = c.radarr.movies()
        status = c.radarr.status()
        history = c.radarr.history()
        return render_template(
            "radarr/index.html", movies=movies, status=status, history=history
        )
    return render_template("radarr/details.html")
+
+
@app.route("/qbittorrent")
def qbittorrent():
    """qBittorrent overview: torrent list with state filtering and sorting."""
    client = Client(handle_config())
    torrent_data = client.qbittorent.status()
    sort_choices = {
        "speed": "Transfer Speed",
        "eta": "Time remaining",
        "state": "State",
        "category": "Category",
    }
    return render_template(
        "qbittorrent/index.html",
        qbt=torrent_data,
        status_map=client.qbittorent.status_map,
        state_filter=request.args.get("state"),
        sort_by=request.args.get("sort", "speed"),
        sort_by_choices=sort_choices,
    )
+
+
# BUG FIX: the route lost its <infohash> converter, so the view's required
# argument could never be supplied.
@app.route("/qbittorrent/add_trackers/<infohash>")
def qbittorent_add_trackers(infohash):
    """Append public trackers to torrent *infohash*, then show its details."""
    cfg = handle_config()
    c = Client(cfg)
    c.qbittorent.add_trackers(infohash)
    return redirect(url_for("qbittorrent_details", infohash=infohash))
+
# BUG FIX: the route lost its <infohash> converter, so the view's required
# argument could never be supplied.
@app.route("/qbittorrent/<infohash>")
def qbittorrent_details(infohash):
    """Details page for a single torrent identified by *infohash*."""
    cfg = handle_config()
    c = Client(cfg)
    qbt = c.qbittorent.status(infohash)
    return render_template(
        "qbittorrent/details.html", qbt=qbt, status_map=c.qbittorent.status_map
    )
+
+
+from wtforms_alchemy import model_form_factory, ModelFieldList
+from flask_wtf import FlaskForm
+from wtforms.fields import FormField
+
+BaseModelForm = model_form_factory(FlaskForm)
+
+
class ModelForm(BaseModelForm):
    """WTForms-Alchemy base form bound to the application's DB session."""

    @classmethod
    def get_session(cls):
        # FIX: the first parameter of a classmethod receives the class and is
        # conventionally named `cls`, not `self`. Callers are unaffected.
        return app.db.session
+
+
+@app.route("/test", methods=["GET", "POST"])
+def test():
+ form = TranscodeProfileForm()
+ if form.validate_on_submit():
+ print(form.data)
+ return render_template("test.html", form=form)
+
+
+@app.route("/placeholder")
+def placeholder():
+ return send_file(make_placeholder_image(**request.args), mimetype="image/png")
+
+
+@app.route("/containers", defaults={"container_id": None})
+@app.route("/containers/")
+def containers(container_id):
+ cfg = handle_config()
+ c = Client(cfg)
+ if container_id:
+ container = c.portainer.containers(container_id)
+ return render_template("containers/details.html", container=container)
+ containers = c.portainer.containers()
+ return render_template("containers/index.html", containers=containers)
+
+
def get_stats():
    """Load the collector's cached stats from stats.json.

    Returns None when a collection run is swapping files (stats.lock exists),
    when no stats file has been written yet, or when the file is unreadable.

    FIXES: the bare ``except:`` (which also swallowed KeyboardInterrupt) is
    narrowed to the errors open()/json.load() actually raise, and the
    implicit fall-through now returns None explicitly.
    """
    if os.path.isfile("stats.lock"):
        return None
    try:
        if os.path.isfile("stats.json"):
            with open("stats.json") as fh:
                return json.load(fh)
    except (OSError, ValueError):
        # ValueError covers json.JSONDecodeError on a partially written file.
        return None
    return None
+
+
+@app.route("/transcode", methods=["GET", "POST"])
+def transcode():
+ return render_template("transcode/profiles.html")
+
+
+@app.route("/log")
+def app_log():
+ cfg = handle_config()
+ c = Client(cfg)
+ logs = {
+ "radarr": c.radarr.log(),
+ "sonarr": c.sonarr.log(),
+ "qbt": c.qbittorent.log(),
+ "peers": c.qbittorent.peer_log(),
+ }
+ return render_template("logs.html", logs=logs)
+
+
def ssh_fingerprint(key):
    """Return the colon-separated hex MD5 fingerprint of a base64 key blob."""
    digest = hashlib.md5(base64.b64decode(key)).hexdigest()
    pairs = (digest[i:i + 2] for i in range(0, len(digest), 2))
    return ":".join(pairs)
+
+@app.route("/remote")
+def remote():
+ cfg = handle_config()
+ c = Client(cfg)
+ res = c.ssh.get("/data/.ssh/authorized_keys",io.BytesIO())
+ res.local.seek(0)
+ ssh_keys=[]
+ for key in str(res.local.read(),"utf8").splitlines():
+ disabled=False
+ if key.startswith("#"):
+ key=key.lstrip("#").lstrip()
+ disabled=True
+ try:
+ load_ssh_public_key(bytes(key,"utf8"))
+ except:
+ continue
+ key_type,key,name=key.split(None,2)
+ ssh_keys.append({
+ 'disabled': disabled,
+ 'type':key_type,
+ 'key':key,
+ 'fingerprint': ssh_fingerprint(key),
+ 'name': name
+ })
+ key=request.args.get("key")
+ enabled=request.args.get("enabled")
+ if not (key is None or enabled is None):
+ key_file=[]
+ for ssh_key in ssh_keys:
+ if ssh_key['key']==key:
+ ssh_key['disabled']=enabled=="False"
+ if ssh_key['disabled']:
+ key_file.append("#{type} {key} {name}".format(**ssh_key))
+ else:
+ key_file.append("{type} {key} {name}".format(**ssh_key))
+ buf=io.BytesIO(bytes("\n".join(key_file),"utf8"))
+ c.ssh.put(buf,"/data/.ssh/authorized_keys",preserve_mode=False)
+ return redirect(url_for("remote"))
+ jellyfin_users = c.jellyfin.get_users()
+ return render_template("remote/index.html",ssh=ssh_keys,jf=jellyfin_users)
+
+@app.route("/jellyfin/stop")
+def stop_stream():
+ cfg = handle_config()
+ c = Client(cfg)
+ session_id=request.args.get("session")
+ c.jellyfin.stop_session(session_id)
+ return redirect(url_for("jellyfin"))
+
+@app.route("/jellyfin")
+def jellyfin():
+ cfg = handle_config()
+ c = Client(cfg)
+ jellyfin={
+ "users":c.jellyfin.get_users(),
+ "sessions": c.jellyfin.sessions(),
+ "info" : c.jellyfin.system_info()
+ }
+ return render_template("jellyfin/index.html",jellyfin=jellyfin)
+
+@app.route("/remote/add",methods=["GET","POST"])
+def remote_add():
+ from cryptography.hazmat.primitives import serialization
+ form = AddSSHUser()
+ cfg = handle_config()
+ c = Client(cfg)
+ if form.validate_on_submit():
+ key=load_ssh_public_key(bytes(form.data['ssh_key'],"utf8"))
+ rawKeyData = key.public_bytes(
+ encoding=serialization.Encoding.OpenSSH,
+ format=serialization.PublicFormat.OpenSSH,
+ )
+ passwd=c.add_user(form.data['name'],str(rawKeyData,"utf8"))
+ flash(Markup("".join([
+ f"Name: {form.data['name']}
",
+ f"PW: {passwd}
",
+ f"FP: {ssh_fingerprint(rawKeyData.split()[1])}
"
+ ])))
+ return render_template("remote/add.html",form=form)
+
+
+@app.route("/")
+def index():
+ return render_template("index.html", fluid=True, data=get_stats())
+
+
+if __name__ == "__main__":
+ stats_collector = threading.Thread(
+ None, stats_collect.loop, "stats_collector", (10,), {}, daemon=True
+ )
+ stats_collector.start()
+ port = 5000
+ if "--debug" in sys.argv:
+ app.run(host="0.0.0.0",port=port, debug=True)
+ else:
+ from gevent.pywsgi import WSGIServer
+
+ server = WSGIServer(("0.0.0.0", port), app)
+ print("Running on {0}:{1}".format(*server.address))
+ server.serve_forever()
diff --git a/config.cfg b/config.cfg
new file mode 100644
index 0000000..657738d
--- /dev/null
+++ b/config.cfg
@@ -0,0 +1,4 @@
+SECRET_KEY = b"DEADBEEF"
+SQLALCHEMY_DATABASE_URI = "sqlite:///Mediadash.db"
+SQLALCHEMY_TRACK_MODIFICATIONS = False
+MAX_CONTENT_LENGTH = 1 * 1024 * 1024 #1MB
\ No newline at end of file
diff --git a/config.example.json b/config.example.json
new file mode 100644
index 0000000..3ff3bbb
--- /dev/null
+++ b/config.example.json
@@ -0,0 +1,125 @@
+{
+ "jellyfin_url": "http://127.0.0.1:8096/",
+ "jellyfin_api_key": "",
+ "qbt_url": "http://127.0.0.1:8081/",
+ "qbt_username": "",
+ "qbt_passwd": "",
+ "sonarr_url": "http://127.0.0.1:8080/sonarr/",
+ "sonarr_api_key": "",
+ "radarr_url": "http://127.0.0.1:8080/radarr/",
+ "radarr_api_key": "",
+ "jackett_url": "http://127.0.0.1:9117/jackett/",
+ "jackett_api_key": "",
+ "portainer_url": "http://127.0.0.1:9000/",
+ "portainer_username": "",
+ "portainer_passwd": "",
+ "transcode_default_profile": "MKV Remux",
+ "transcode_profiles": {
+ "MKV Remux": {
+ "command": "-vcodec copy -acodec copy -scodec copy -map 0 -map_metadata 0 -f {format}",
+ "doc": "Remux",
+ "vars": {
+ "format": "Conainter format"
+ },
+ "defaults": {
+ "format": "matroska"
+ }
+ },
+ "H.264 transcode": {
+ "command": "-vcodec h264 -crf {crf} -preset {preset} -acodec copy -scodec copy -map 0 -map_metadata 0",
+ "doc": "Transcode video to H.264",
+ "vars": {
+ "crf": "Constant Rate Factor (Quality, lower is better)",
+ "preset": "H.264 preset"
+ },
+ "defaults": {
+ "crf": 18,
+ "preset": "medium"
+ },
+ "choices": {
+ "tune": ["animation","film","grain"],
+ "preset": ["ultrafast","fast","medium","slow","veryslow"],
+ "crf": {"range":[10,31]}
+ }
+ },
+ "H.265 transcode": {
+ "command": "-vcodec hevc -crf {crf} -preset {preset} -tune {tune} -acodec copy -scodec copy -map 0 -map_metadata 0",
+ "doc": "Transcode video to H.265",
+ "vars": {
+ "crf": "Constant Rate Factor (Quality, lower is better)",
+ "preset": "H.265 preset",
+ "tune": "H.265 tune preset"
+ },
+ "defaults": {
+ "crf": 24,
+ "preset": "medium",
+ "tune": "animation"
+ },
+ "choices": {
+ "tune": ["animation","film","grain"],
+ "preset": ["ultrafast","fast","medium","slow","veryslow"],
+ "crf": {"range":[10,31]}
+ }
+ },
+ "AAC transcode": {
+ "command": "-vcodec copy -acodec aac -scodec copy -map 0 -map_metadata 0",
+ "doc": "Transcode audio to AAC"
+ }
+ },
+ "jellyfin_user_config": {
+ "DisplayCollectionsView": false,
+ "DisplayMissingEpisodes": false,
+ "EnableLocalPassword": false,
+ "EnableNextEpisodeAutoPlay": true,
+ "GroupedFolders": [],
+ "HidePlayedInLatest": true,
+ "LatestItemsExcludes": [],
+ "MyMediaExcludes": [],
+ "OrderedViews": [],
+ "PlayDefaultAudioTrack": true,
+ "RememberAudioSelections": true,
+ "RememberSubtitleSelections": true,
+ "SubtitleLanguagePreference": "",
+ "SubtitleMode": "Default"
+ },
+ "jellyfin_user_policy": {
+ "AccessSchedules": [],
+ "AuthenticationProviderId": "Jellyfin.Server.Implementations.Users.DefaultAuthenticationProvider",
+ "BlockUnratedItems": [],
+ "BlockedChannels": [],
+ "BlockedMediaFolders": [],
+ "BlockedTags": [],
+ "EnableAllChannels": false,
+ "EnableAllDevices": true,
+ "EnableAllFolders": false,
+ "EnableAudioPlaybackTranscoding": true,
+ "EnableContentDeletion": false,
+ "EnableContentDeletionFromFolders": [],
+ "EnableContentDownloading": true,
+ "EnableLiveTvAccess": true,
+ "EnableLiveTvManagement": true,
+ "EnableMediaConversion": true,
+ "EnableMediaPlayback": true,
+ "EnablePlaybackRemuxing": true,
+ "EnablePublicSharing": true,
+ "EnableRemoteAccess": true,
+ "EnableRemoteControlOfOtherUsers": false,
+ "EnableSharedDeviceControl": true,
+ "EnableSyncTranscoding": true,
+ "EnableUserPreferenceAccess": true,
+ "EnableVideoPlaybackTranscoding": true,
+ "EnabledChannels": [],
+ "EnabledDevices": [],
+ "EnabledFolders": [],
+ "ForceRemoteSourceTranscoding": false,
+ "InvalidLoginAttemptCount": 0,
+ "IsAdministrator": false,
+ "IsDisabled": false,
+ "IsHidden": true,
+ "LoginAttemptsBeforeLockout": -1,
+ "MaxActiveSessions": 1,
+ "PasswordResetProviderId": "Jellyfin.Server.Implementations.Users.DefaultPasswordResetProvider",
+ "RemoteClientBitrateLimit": 1000000,
+ "SyncPlayAccess": "CreateAndJoinGroups"
+ }
+}
\ No newline at end of file
diff --git a/forms.py b/forms.py
new file mode 100644
index 0000000..1e67e6e
--- /dev/null
+++ b/forms.py
@@ -0,0 +1,96 @@
+# -*- coding: utf-8 -*-
+from flask_wtf import FlaskForm
+import json
+import os
+from cryptography.hazmat.primitives.serialization import load_ssh_public_key
+from wtforms import (
+ StringField,
+ PasswordField,
+ FieldList,
+ FloatField,
+ BooleanField,
+ SelectField,
+ SubmitField,
+ validators,
+ Field,
+ FieldList,
+ SelectMultipleField,
+ TextAreaField,
+ FieldList,
+ FormField,
+)
+from flask_wtf.file import FileField, FileAllowed, FileRequired
+from wtforms.ext.sqlalchemy.orm import model_form
+from wtforms.fields.html5 import SearchField
+from wtforms.widgets.html5 import NumberInput
+from wtforms.widgets import TextInput, CheckboxInput, ListWidget, PasswordInput
+from wtforms.validators import (
+ ValidationError,
+ DataRequired,
+ URL,
+ ValidationError,
+ Optional,
+)
+
+
def json_prettify(file):
    """Read a JSON file and return it re-serialized with 4-space indentation."""
    with open(file, "r") as handle:
        parsed = json.load(handle)
    return json.dumps(parsed, indent=4)
+
class SearchForm(FlaskForm):
    # Unified search form: the checkboxes select which backends are queried
    # (Sonarr for shows, Radarr for movies, Jackett for torrents).
    query = SearchField("Query", validators=[DataRequired()])
    tv_shows = BooleanField("TV Shows", default=True)
    movies = BooleanField("Movies", default=True)
    torrents = BooleanField("Torrents", default=True)
    # Choices are empty here and populated at runtime — presumably from
    # Jackett's indexer list; confirm in the view that instantiates the form.
    indexer = SelectMultipleField(choices=[])
    group_by_tracker = BooleanField("Group torrents by tracker")
    search = SubmitField("Search")
+
class HiddenPassword(PasswordField):
    # Password field whose widget renders the current value back into the
    # input (hide_value=False), so stored secrets survive an edit round-trip.
    widget = PasswordInput(hide_value=False)
+
class TranscodeProfileForm(FlaskForm):
    # Minimal scratch form; currently only used by the /test endpoint.
    test = TextAreaField()
    save = SubmitField("Save")
+
class AddSSHUser(FlaskForm):
    """Form for registering a new SSH user with a public key."""

    name = StringField("Name", validators=[DataRequired()])
    ssh_key = StringField("Public key", validators=[DataRequired()])
    add = SubmitField("Add")

    def validate_ssh_key(self, field):
        """Reject input that does not parse as an SSH public key.

        BUG FIX: load_ssh_public_key() raises (e.g. ValueError,
        UnsupportedAlgorithm) on bad input; previously that propagated out of
        validation as a 500 instead of rendering a form error.
        """
        try:
            load_ssh_public_key(bytes(field.data, "utf8"))
        except Exception as exc:
            # Broad on purpose: any parse failure means "not a valid key";
            # this is a form-validation boundary, not silent swallowing.
            raise ValidationError("Not a valid SSH public key") from exc
+
+
+
class ConfigForm(FlaskForm):
    """Connection settings for every backend service, one field group each.

    HiddenPassword is used for secrets so stored values are rendered back
    into the form when editing.
    """

    # Jellyfin
    jellyfin_url = StringField("URL", validators=[URL()])
    jellyfin_api_key = StringField("API Key")

    # QBittorrent
    qbt_url = StringField("URL", validators=[URL()])
    qbt_username = StringField("Username")
    qbt_passwd = HiddenPassword("Password")

    # Sonarr
    sonarr_url = StringField("URL", validators=[URL()])
    sonarr_api_key = HiddenPassword("API key")

    # Radarr
    radarr_url = StringField("URL", validators=[URL()])
    radarr_api_key = HiddenPassword("API key")

    # Jackett
    jackett_url = StringField("URL", validators=[URL()])
    jackett_api_key = HiddenPassword("API key")

    # Portainer
    portainer_url = StringField("URL", validators=[URL()])
    portainer_username = StringField("Username")
    portainer_passwd = HiddenPassword("Password")

    # Transcoding: choices are empty here and filled in at runtime
    # (presumably from the configured transcode_profiles — confirm in the
    # config view).
    transcode_default_profile = SelectField(
        "Default profile", choices=[], validators=[]
    )
    transcode_profiles = FileField(
        "Transcode profiles JSON",
        validators=[Optional(), FileAllowed(["json"], "JSON files only!")],
    )

    test = SubmitField("Test")
    save = SubmitField("Save")
diff --git a/models/__init__.py b/models/__init__.py
new file mode 100644
index 0000000..ef86525
--- /dev/null
+++ b/models/__init__.py
@@ -0,0 +1,4 @@
+from flask_sqlalchemy import SQLAlchemy
+db = SQLAlchemy()
+from .stats import Stats
+from .transcode import TranscodeJob
\ No newline at end of file
diff --git a/models/stats.py b/models/stats.py
new file mode 100644
index 0000000..2ea0348
--- /dev/null
+++ b/models/stats.py
@@ -0,0 +1,14 @@
+from . import db
+from sqlalchemy import String, Float, Column, Integer, DateTime
+from datetime import datetime
+
+
class Stats(db.Model):
    # Time-series row for one sampled metric: (timestamp, key, value).
    id = db.Column(db.Integer, primary_key=True)
    # datetime.today is passed uncalled so each row records its insert time.
    timestamp = db.Column(db.DateTime, default=datetime.today)
    key = db.Column(db.String)
    value = db.Column(db.Float)
+
class Diagrams(db.Model):
    # Pre-rendered diagram payloads keyed by name. `data` is presumably a
    # serialized chart blob (JSON or data-URI image) — confirm with the code
    # that writes this table.
    name = db.Column(db.String,primary_key=True)
    data = db.Column(db.String)
\ No newline at end of file
diff --git a/models/transcode.py b/models/transcode.py
new file mode 100644
index 0000000..39cf917
--- /dev/null
+++ b/models/transcode.py
@@ -0,0 +1,13 @@
+from . import db
+from sqlalchemy import String, Float, Column, Integer, DateTime, ForeignKey
+from sqlalchemy_utils import JSONType
+from sqlalchemy.orm import relationship
+from datetime import datetime
+
+
class TranscodeJob(db.Model):
    """A single media transcode job and its lifecycle timestamps."""

    id = db.Column(db.Integer, primary_key=True)
    # datetime.today is passed uncalled so each row records its insert time.
    created = db.Column(db.DateTime, default=datetime.today)
    # BUG FIX: `default={}` handed the same shared dict instance to every
    # row; passing the `dict` constructor gives each row a fresh mapping.
    status = db.Column(JSONType, default=dict)
    completed = db.Column(db.DateTime, default=None)
    profile = db.Column(db.String, default=None)
\ No newline at end of file
diff --git a/models/users.py b/models/users.py
new file mode 100644
index 0000000..e69de29
diff --git a/static/theme.css b/static/theme.css
new file mode 100644
index 0000000..6af52a6
--- /dev/null
+++ b/static/theme.css
@@ -0,0 +1,129 @@
+body,
+input,
+select,
+pre,
+textarea,
+tr {
+ background-color: #222 !important;
+ color: #eee;
+}
+
+pre.inline {
+ display: inline;
+ margin: 0;
+}
+
+th {
+ border-bottom: 1px;
+}
+
+thead, table {
+ line-height: 1;
+ color: #eee;
+}
+
+hr {
+ color: #eee;
+ border-color: #eee;
+ margin: 10px 0;
+}
+
+p {
+ padding: 0;
+ margin: 0;
+}
+
+.list-group-item {
+ background-color: #181818;
+ border-color: #eee;
+}
+
+.dropdown-menu {
+ background-color: #444;
+}
+
+.progress {
+ background-color: #444;
+}
+.progress-bar {
+ background-color: #f70;
+}
+
+.form-control {
+ color: #eee !important;
+}
+
+.form-group {
+ margin-bottom: 0;
+}
+
+.btn {
+ margin-top: 10px;
+}
+.form-control-label {
+ margin-top: 10px;
+}
+
+.torrent_results {
+ width: 100%;
+}
+
+.nav-pills {
+ margin-top: 10px;
+}
+
+h1,
+h2,
+h3 {
+ margin-top: 10px;
+}
+
+/* Remove default bullets */
+ul.file_tree,
+ul.tree,
+ul.file {
+ list-style-type: none;
+ margin: 0;
+ padding: 0;
+}
+
+ul.tree {
+ padding-left: 10px;
+}
+
+.monospace {
+ font-family: monospace;
+ overflow: scroll;
+ max-height: 500px;
+ min-height: 500px;
+ max-width: 100%;
+ min-width: 100%;
+}
+
+/* Style the caret/arrow */
+.custom_caret {
+ cursor: pointer;
+ user-select: none;
+ /* Prevent text selection */
+}
+
+/* Create the caret/arrow with a unicode, and style it */
+.custom_caret::before {
+ content: "[+]";
+ display: inline-block;
+}
+
+/* Rotate the caret/arrow icon when clicked on (using JavaScript) */
+.custom_caret-down::before {
+ content: "[-]";
+}
+
+/* Hide the nested list */
+.nested {
+ display: none;
+}
+
+/* Show the nested list when the user clicks on the caret/arrow (with JavaScript) */
+.active {
+ display: block;
+}
diff --git a/stats_collect.py b/stats_collect.py
new file mode 100644
index 0000000..bc1309d
--- /dev/null
+++ b/stats_collect.py
@@ -0,0 +1,409 @@
+import pylab as PL
+from matplotlib.ticker import EngFormatter
+from base64 import b64encode
+from api import Client
+from utils import handle_config
+import time
+import json
+import io
+import os
+from urllib.parse import quote
+from datetime import datetime
+from concurrent.futures import ThreadPoolExecutor, as_completed
+
+mpl_style = "dark_background"
+
+smoothness = 5
+
+
def make_svg(data, dtype):
    # Wrap raw image bytes into a base64 data: URI for inline embedding.
    data_uri = "data:{};base64,{}".format(dtype, quote(str(b64encode(data), "ascii")))
    # NOTE(review): the returned template below looks truncated — it formats
    # into a bare space; presumably an <img src="{}"/> tag was lost in
    # extraction. Confirm against the original file before changing.
    return ' '.format(data_uri)
+
+
def make_smooth(data, window_size):
    """Smooth *data* with a forward-looking moving average of *window_size*.

    The window shrinks near the end of the sequence, so the output has the
    same length as the input.
    """
    smoothed = []
    for start in range(len(data)):
        window = data[start:start + window_size]
        smoothed.append(sum(window) / len(window))
    return smoothed
+
+
def stackplot(data, names, title=None, color="#eee", unit=None, smooth=0):
    """Render a stacked area chart of data[v] over data["t"] as an SVG data URI.

    names maps legend label -> key in data. smooth > 0 applies a moving
    average of that window size (mutates data[v] in place). color is
    currently unused.
    """
    fig = io.BytesIO()
    with PL.style.context(mpl_style):
        labels = []
        values = []
        for k, v in names.items():
            # data["t"] holds epoch seconds; convert for the date axis.
            t = list(map(datetime.fromtimestamp, data["t"]))
            if smooth:
                data[v] = make_smooth(data[v], smooth)
            values.append(data[v])
            labels.append(k)
        PL.stackplot(t, values, labels=labels)
        PL.legend()
        PL.grid(True, ls="--")
        PL.gcf().autofmt_xdate()
        PL.gca().margins(x=0)
        if title:
            PL.title(title)
        if unit:
            # Engineering notation (k, M, G, ...) on the value axis.
            PL.gca().yaxis.set_major_formatter(EngFormatter(unit=unit))
        PL.tight_layout()
        PL.savefig(fig, format="svg", transparent=True)
        # Clear pyplot's global figure so plots don't leak between calls.
        PL.clf()
    return make_svg(fig.getvalue(), "image/svg+xml")
+
+
def lineplot(data, names, title=None, color="#eee", unit=None, smooth=0):
    """Render a line chart of data[v] over data["t"] as an SVG data URI.

    Same contract as stackplot(); color is currently unused.
    """
    fig = io.BytesIO()
    with PL.style.context(mpl_style):
        for k, v in names.items():
            # data["t"] holds epoch seconds; convert for the date axis.
            t = list(map(datetime.fromtimestamp, data["t"]))
            if smooth:
                data[v] = make_smooth(data[v], smooth)
            PL.plot(t, data[v], label=k)
        PL.legend()
        PL.grid(True, ls="--")
        PL.gcf().autofmt_xdate()
        PL.gca().margins(x=0)
        if title:
            PL.title(title)
        if unit:
            PL.gca().yaxis.set_major_formatter(EngFormatter(unit=unit))
        PL.tight_layout()
        PL.savefig(fig, format="svg", transparent=True)
        # Clear pyplot's global figure so plots don't leak between calls.
        PL.clf()
    return make_svg(fig.getvalue(), "image/svg+xml")
+
+
def histogram(values, bins, title=None, color="#eee", unit=""):
    """Render a log-scale histogram as an SVG data URI. color is unused."""
    fig = io.BytesIO()
    with PL.style.context(mpl_style):
        PL.hist(values, bins=bins, log=True)
        if title:
            PL.title(title)
        PL.grid(True, ls="--")
        # Engineering notation (k, M, G, ...) on the bucket axis.
        PL.gca().xaxis.set_major_formatter(EngFormatter(unit=unit))
        PL.gca().margins(x=0)
        PL.tight_layout()
        PL.savefig(fig, format="svg", transparent=True)
        # Clear pyplot's global figure so plots don't leak between calls.
        PL.clf()
    return make_svg(fig.getvalue(), "image/svg+xml")
+
+
def prc_label(label, idx, values):
    """Format a pie-slice label as "name (count, percent)".

    BUG FIX: the old format string was "{:.2%}%" — the ``%`` presentation
    type already multiplies by 100 and appends a percent sign, so labels
    rendered with a doubled "%%".
    """
    return "{} ({}, {:.2%})".format(label, values[idx], values[idx] / sum(values))
+
+
def byte_labels(label, idx, values):
    """Format a pie-slice label of byte counts as "name (X.XX KiB, percent)".

    Scales values[idx] in place while choosing a binary unit prefix (callers
    pass a copy). BUG FIXES: the "{:.2%}%" double percent sign, and the unit
    loop could step one past the end of the suffix table for huge values.
    """
    orig_values = list(values)
    suffix = ["", "K", "M", "G", "T", "P", "E"]
    i = 0
    while values[idx] > 1024 and i < len(suffix) - 1:
        values[idx] /= 1024
        i += 1
    val = "{:.2f} {}iB".format(values[idx], suffix[i])
    return "{} ({}, {:.2%})".format(label, val, orig_values[idx] / sum(orig_values))
+
+
def byte_rate_labels(label, idx, values):
    """Format a pie-slice label of byte/s rates as "name (X.XX KiB/s)".

    Scales values[idx] in place while choosing a binary unit prefix (callers
    pass a copy). FIXES: the unit loop could step one past the end of the
    suffix table; the unused orig_values copy is dropped.
    """
    suffix = ["", "K", "M", "G", "T", "P", "E"]
    i = 0
    while values[idx] > 1024 and i < len(suffix) - 1:
        values[idx] /= 1024
        i += 1
    val = "{:.2f} {}iB/s".format(values[idx], suffix[i])
    return "{} ({})".format(label, val)
+
+
def piechart(items, title=None, labelfunc=prc_label, sort=True):
    """Render a pie chart of {label: value} (or {label: (value, color)}).

    sort=True orders slices by value, otherwise alphabetically by label.
    labelfunc(label, idx, values) formats each legend entry.
    """
    fig = io.BytesIO()
    labels = []
    values = []
    colors = []
    if sort:
        items = sorted(items.items(), key=lambda v: v[1])
    else:
        items = sorted(items.items())
    for k, v in items:
        labels.append(k)
        if isinstance(v, tuple) and len(v) == 2:
            # (value, explicit slice color) form.
            v, c = v
            colors.append(c)
        values.append(v)
    # Either every slice has a color or matplotlib picks all of them.
    colors = colors or None
    for i, label in enumerate(labels):
        # Pass a copy: some label funcs (byte_labels) scale values in place.
        labels[i] = labelfunc(label, i, values[:])
    with PL.style.context(mpl_style):
        # labeldistance=None suppresses on-slice labels; legend shows them.
        PL.pie(values, labels=labels, colors=colors, labeldistance=None)
        PL.legend()
        if title:
            PL.title(title)
        PL.tight_layout()
        PL.savefig(fig, format="svg", transparent=True)
        # Clear pyplot's global figure so plots don't leak between calls.
        PL.clf()
    return make_svg(fig.getvalue(), "image/svg+xml")
+
+
+hist = {
+ "t": [],
+ "dl": [],
+ "ul": [],
+ "dl_size": [],
+ "ul_size": [],
+ "dl_size_sess": [],
+ "ul_size_sess": [],
+ "connections": [],
+ "bw_per_conn": [],
+ "dht_nodes": [],
+}
+
+
def update_qbt_hist(stats, limit=1024):
    """Append the current QBittorrent sample to the module-level history.

    Keeps at most *limit* samples and drops everything before any gap longer
    than one hour (i.e. the collector was stopped in between).

    BUG FIX: guard the bandwidth-per-connection division against a peer
    count of zero, which previously raised ZeroDivisionError and aborted the
    whole collection run.
    """
    global hist
    data = stats["qbt"]["status"]
    server = data["server_state"]
    hist["t"].append(time.time())
    hist["dl"].append(server["dl_info_speed"])
    hist["ul"].append(server["up_info_speed"])
    hist["dl_size"].append(server["alltime_dl"])
    hist["ul_size"].append(server["alltime_ul"])
    hist["dl_size_sess"].append(server["dl_info_data"])
    hist["ul_size_sess"].append(server["up_info_data"])
    hist["connections"].append(server["total_peer_connections"])
    hist["dht_nodes"].append(server["dht_nodes"])
    # With zero peers report 0 bandwidth per connection instead of crashing.
    connections = server["total_peer_connections"] or 1
    hist["bw_per_conn"].append(
        (server["dl_info_speed"] + server["up_info_speed"]) / connections
    )
    for k in hist:
        hist[k] = hist[k][-limit:]
    last_idx = 0
    for i, (t1, t2) in enumerate(zip(hist["t"], hist["t"][1:])):
        if abs(t1 - t2) > (60 * 60):  # gap longer than 1h: collector restart
            last_idx = i + 1
    for k in hist:
        hist[k] = hist[k][last_idx:]
    return hist
+
+
def collect_stats():
    """Poll Radarr/Sonarr/QBittorrent and build the dashboard payload.

    Returns a JSON-serializable dict with keys "data" (raw API responses),
    "images" (rendered SVG charts as data URIs, grouped per tab), "hist"
    (rolling transfer history) and "calendar" (upcoming releases).
    """
    from collections import Counter

    PL.clf()
    cfg = handle_config()
    c = Client(cfg)
    series = {}
    movies = {}
    data = {
        "radarr": {"entries": c.radarr.movies(), "status": c.radarr.status()},
        "sonarr": {
            "entries": c.sonarr.series(),
            "status": c.sonarr.status(),
            "details": {},
        },
        "qbt": {"status": c.qbittorent.status()},
    }
    # Index shows/movies by id for the calendar section below.
    for show in data["sonarr"]["entries"]:
        series[show["id"]] = show
    for movie in data["radarr"]["entries"]:
        movies[movie["id"]] = movie
    # Tally torrents by human-readable state and by category.
    torrent_states = {}
    torrent_categories = {}
    for torrent in data["qbt"]["status"]["torrents"].values():
        state = c.qbittorent.status_map.get(
            torrent["state"], (torrent["state"], None)
        )[0]
        category = torrent["category"] or ""
        torrent_states.setdefault(state, 0)
        torrent_categories.setdefault(category, 0)
        torrent_states[state] += 1
        torrent_categories[category] += 1
    # Media-file statistics accumulated over both libraries.
    vbitrates = []
    abitrates = []
    acodecs = []
    vcodecs = []
    qualities = []
    formats = []
    sizes = {"Shows": 0, "Movies": 0}
    radarr_stats = {"missing": 0, "available": 0}
    for movie in data["radarr"]["entries"]:
        if movie["hasFile"]:
            radarr_stats["available"] += 1
        else:
            radarr_stats["missing"] += 1
        sizes["Movies"] += movie.get("movieFile", {}).get("size", 0)
        vbr = movie.get("movieFile", {}).get("mediaInfo", {}).get("videoBitrate", None)
        abr = movie.get("movieFile", {}).get("mediaInfo", {}).get("audioBitrate", None)
        acodec = movie.get("movieFile", {}).get("mediaInfo", {}).get("audioCodec", None)
        vcodec = movie.get("movieFile", {}).get("mediaInfo", {}).get("videoCodec", None)
        fmt = movie.get("movieFile", {}).get("relativePath", "").split(".")[-1].lower()
        qual = (
            movie.get("movieFile", {}).get("quality", {}).get("quality", {}).get("name")
        )
        if qual:
            qualities.append(qual)
        if acodec:
            acodecs.append(acodec)
        if vcodec:
            # Normalize the various H.264/H.265 spellings.
            if vcodec.lower() in ["x265", "h265", "hevc"]:
                vcodec = "H.265"
            if vcodec.lower() in ["x264", "h264"]:
                vcodec = "H.264"
            vcodecs.append(vcodec)
        if vbr:
            vbitrates.append(vbr)
        if abr:
            abitrates.append(abr)
        if fmt:
            formats.append(fmt)
    sonarr_stats = {"missing": 0, "available": 0}
    info_jobs = []
    with ThreadPoolExecutor(16) as pool:
        for show in data["sonarr"]["entries"]:
            info_jobs.append(pool.submit(c.sonarr.series, show["id"]))
        # BUG FIX: this used to zip as_completed(info_jobs) with the entries
        # list; as_completed yields futures in *completion* order, so details
        # were attributed to the wrong shows. Iterating the futures in
        # submission order keeps the pairing correct (result() blocks until
        # each future is done).
        for job, show in zip(info_jobs, data["sonarr"]["entries"]):
            info = job.result()
            data["sonarr"]["details"][show["id"]] = info
            for file in info["episodeFile"]:
                vbr = file.get("mediaInfo", {}).get("videoBitrate", None)
                abr = file.get("mediaInfo", {}).get("audioBitrate", None)
                acodec = file.get("mediaInfo", {}).get("audioCodec", None)
                vcodec = file.get("mediaInfo", {}).get("videoCodec", None)
                fmt = file.get("relativePath", "").split(".")[-1].lower()
                qual = file.get("quality", {}).get("quality", {}).get("name")
                sizes["Shows"] += file.get("size", 0)
                if qual:
                    qualities.append(qual)
                if acodec:
                    acodecs.append(acodec)
                if vcodec:
                    if vcodec.lower() in ["x265", "h265", "hevc"]:
                        vcodec = "H.265"
                    if vcodec.lower() in ["x264", "h264"]:
                        vcodec = "H.264"
                    vcodecs.append(vcodec)
                if vbr:
                    vbitrates.append(vbr)
                if abr:
                    abitrates.append(abr)
                if fmt:
                    formats.append(fmt)
            for season in show.get("seasons", []):
                stats = season.get("statistics", {})
                sonarr_stats["missing"] += (
                    stats["totalEpisodeCount"] - stats["episodeFileCount"]
                )
                sonarr_stats["available"] += stats["episodeFileCount"]
    hist = update_qbt_hist(data)
    # Attach fixed slice colors: green for available, red for missing.
    sonarr_stats["available"] = (sonarr_stats["available"], "#5f5")
    sonarr_stats["missing"] = (sonarr_stats["missing"], "#f55")
    radarr_stats["available"] = (radarr_stats["available"], "#5f5")
    radarr_stats["missing"] = (radarr_stats["missing"], "#f55")
    imgs = [
        [
            "Media",
            histogram([vbitrates], "auto", "Video Bitrate", unit="b/s"),
            histogram([abitrates], "auto", "Audio Bitrate", unit="b/s"),
            piechart(dict(Counter(vcodecs)), "Video codecs"),
            piechart(dict(Counter(acodecs)), "Audio codecs"),
            piechart(dict(Counter(formats)), "Container formats"),
            piechart(dict(Counter(qualities)), "Quality"),
            piechart(sizes, "Disk usage", byte_labels),
            piechart(sonarr_stats, "Episodes"),
            piechart(radarr_stats, "Movies"),
        ],
        [
            "Torrents",
            piechart(torrent_states, "Torrents"),
            piechart(torrent_categories, "Torrent categories"),
            piechart(
                # + 0.0 coerces to float so byte_rate_labels can scale.
                {"Upload": hist["ul"][-1] + 0.0, "Download": hist["dl"][-1] + 0.0},
                "Bandwidth utilization",
                byte_rate_labels,
                sort=False,
            ),
            stackplot(
                hist,
                {"Download": "dl", "Upload": "ul"},
                "Transfer speed",
                unit="b/s",
                smooth=smoothness,
            ),
            stackplot(
                hist,
                {"Download": "dl_size_sess", "Upload": "ul_size_sess"},
                "Transfer volume (Session)",
                unit="b",
            ),
            stackplot(
                hist,
                {"Download": "dl_size", "Upload": "ul_size"},
                "Transfer volume (Total)",
                unit="b",
            ),
            lineplot(
                hist,
                {"Connections": "connections"},
                "Peers",
                unit=None,
                smooth=smoothness,
            ),
            lineplot(
                hist,
                {"Bandwidth per connection": "bw_per_conn"},
                "Connections",
                unit="b/s",
                smooth=smoothness,
            ),
            lineplot(hist, {"DHT Nodes": "dht_nodes"}, "DHT", unit=None),
        ],
    ]
    calendar = {"movies": [], "episodes": []}
    for movie in c.radarr.calendar():
        calendar["movies"].append(movie)
    for episode in c.sonarr.calendar():
        # airDateUtc looks like "2021-08-29T15:03:28.000Z": strip sub-seconds
        # and the trailing Z before parsing.
        t = episode['airDateUtc'].rstrip("Z").split(".")[0]
        t = datetime.strptime(t, "%Y-%m-%dT%H:%M:%S")
        # BUG FIX: the air date is UTC, so compare against UTC now — the old
        # datetime.today() compared local wall-clock time to a UTC value.
        episode['hasAired'] = datetime.utcnow() > t
        calendar["episodes"].append(
            {"episode": episode, "series": series[episode["seriesId"]]}
        )
    return {"data": data, "images": imgs, "hist": hist, "calendar": calendar}
+
+
+if os.path.isfile("stats.json"):
+ with open("stats.json", "r") as of:
+ try:
+ hist = json.load(of)["hist"]
+ except Exception as e:
+ print("Error loading history:", str(e))
+
+
def update():
    """Run one stats collection and atomically swap in a new stats.json.

    stats_temp.json is written first, then a stats.lock file signals readers
    (get_stats in the web app) while the rename happens.

    FIX: corrected the typo in the error log message ("collectin").
    """
    print("Updating...")
    try:
        stats = collect_stats()
    except Exception as e:
        # Best effort: keep the previous stats.json if this run fails.
        print("Error collecting statistics:", str(e))
        stats = None
    if stats:
        with open("stats_temp.json", "w") as of:
            json.dump(stats, of)
        open("stats.lock", "w").close()
        if os.path.isfile("stats.json"):
            os.unlink("stats.json")
        os.rename("stats_temp.json", "stats.json")
        os.unlink("stats.lock")
    print("Done!")
+
def loop(seconds):
    # Collector entry point used by the web app's daemon thread: refresh the
    # stats forever, sleeping *seconds* between runs.
    while True:
        update()
        time.sleep(seconds)
+
+
+if __name__=="__main__":
+ update()
\ No newline at end of file
diff --git a/templates/base.html b/templates/base.html
new file mode 100644
index 0000000..cf117d3
--- /dev/null
+++ b/templates/base.html
@@ -0,0 +1,40 @@
+{% from 'bootstrap/utils.html' import render_messages %}
+
+
+
+
+ {% block head %}
+
+
+ {% block styles %}
+ {{ bootstrap.load_css() }}
+
+ {% endblock %}
+ MediaDash
+ {% endblock %}
+
+
+ {% block navbar %}
+
+ MediaDash
+
+
+
+
+ {{nav.left_nav.render(renderer='bootstrap4')}}
+
+
+
+ {% endblock %}
+ {% block content %}
+
+ {{render_messages()}}
+ {% block app_content %}{% endblock %}
+
+ {% endblock %}
+
+ {% block scripts %}
+ {{ bootstrap.load_js(with_popper=False) }}
+ {% endblock %}
+
+
\ No newline at end of file
diff --git a/templates/config.html b/templates/config.html
new file mode 100644
index 0000000..1d7e4c7
--- /dev/null
+++ b/templates/config.html
@@ -0,0 +1,70 @@
+{% extends "base.html" %}
+{% from 'utils.html' import custom_render_form_row,make_tabs %}
+{% from 'bootstrap/form.html' import render_form, render_field, render_form_row %}
+
+{% set col_size = ('lg',2,6) %}
+{% set col_size_seq = ('lg',10,1) %}
+
+{% macro render_fields(fields) %}
+ {% for field in fields %}
+ {% if field is sequence %}
+ {{ custom_render_form_row(field|list,col_map={'transcode_edit':('lg',1),'transcode_new':('lg',1)},render_args={'form_type':'horizontal'}) }}
+ {% else %}
+ {{ custom_render_form_row([field],render_args={'form_type':'horizontal','horizontal_columns':col_size}) }}
+ {% endif %}
+ {% endfor %}
+{% endmacro %}
+
+{% set config_tabs = [] %}
+{% for name, fields in [
+ ('Jellyfin',[form.jellyfin_url,form.jellyfin_username,form.jellyfin_passwd]),
+ ('QBittorrent',[form.qbt_url,form.qbt_username,form.qbt_passwd]),
+ ('Sonarr',[form.sonarr_url,form.sonarr_api_key]),
+ ('Radarr',[form.radarr_url,form.radarr_api_key]),
+ ('Portainer',[form.portainer_url,form.portainer_username,form.portainer_passwd]),
+ ('Jackett',[form.jackett_url,form.jackett_api_key]),
+ ('Transcode',[form.transcode_default_profile,form.transcode_profiles]),
+] %}
+ {% do config_tabs.append((name,render_fields(fields))) %}
+{% endfor %}
+
+{% block app_content %}
+{{title}}
+{% if test %}
+{% if test.success %}
+
+
Success
+
+{% else %}
+
+ {% for module,error in test.errors.items() %}
+ {% if error %}
+
{{module}}
+ {% if error is mapping %}
+ {% for key,value in error.items() %}
+
{{key}} : {{value}}
+ {% endfor %}
+ {% else %}
+
{{error}}
+ {% endif %}
+ {% endif %}
+ {% endfor %}
+
+{% endif %}
+{% endif %}
+{% for field in form %}
+ {% for error in field.errors %}
+ {{error}}
+ {% endfor %}
+{% endfor %}
+
+
+
+ {# render_form(form, form_type ="horizontal", button_map={'test':'primary','save':'success'}) #}
+
+
+{% endblock %}
\ No newline at end of file
diff --git a/templates/containers/details.html b/templates/containers/details.html
new file mode 100644
index 0000000..f940688
--- /dev/null
+++ b/templates/containers/details.html
@@ -0,0 +1,14 @@
+{% extends "base.html" %}
+
+{% block app_content %}
+
+
+ Env
+ {{container.Config.Env|join("\n")}}
+
+ {{container|tojson(indent=4)}}
+{% endblock %}
\ No newline at end of file
diff --git a/templates/containers/index.html b/templates/containers/index.html
new file mode 100644
index 0000000..89f2c34
--- /dev/null
+++ b/templates/containers/index.html
@@ -0,0 +1,58 @@
+{% extends "base.html" %}
+{% from "utils.html" import make_tabs %}
+
+{% macro container_row(info) %}
+
+
+ Image
+
+
+
+ Status
+
+
+ {{info.Status}}
+
+
+
+
+
{{info|tojson(indent=4)}}
+
+{% endmacro %}
+
+{% block app_content %}
+
+
+{% endblock %}
\ No newline at end of file
diff --git a/templates/history.html b/templates/history.html
new file mode 100644
index 0000000..a0fe6c0
--- /dev/null
+++ b/templates/history.html
@@ -0,0 +1,65 @@
+{%- extends "base.html" -%}
+{%- from 'utils.html' import make_tabs -%}
+
+{%- macro default(event,source) -%}
+ Unknown ({{source}})
+ {{event|tojson(indent=4)}}
+{%- endmacro -%}
+
+{%- macro downloadFolderImported(event,source) -%}
+ [{{event.seriesId}}/{{event.episodeId}}] Imported {{event.data.droppedPath}} from {{event.data.downloadClientName}} to {{event.data.importedPath}}
+{%- endmacro -%}
+
+{%- macro grabbed(event,source) -%}
+ [{{event.seriesId}}/{{event.episodeId}}] Grabbed {{event.sourceTitle}}
+{%- endmacro -%}
+
+{%- macro episodeFileDeleted(event,source) -%}
+ [{{event.seriesId}}/{{event.episodeId}}] Deleted {{event.sourceTitle}} because {{event.data.reason}}
+{%- endmacro -%}
+
+{%- macro episodeFileRenamed(event,source) -%}
+ [{{event.seriesId}}/{{event.episodeId}}] Renamed {{event.data.sourcePath}} to {{event.data.path}}
+{%- endmacro -%}
+
+{%- macro movieFileDeleted(event,source) -%}
+ Renamed {{event.data.sourcePath}} to {{event.data.path}}
+{%- endmacro -%}
+
+{%- macro movieFileRenamed(event,source) -%}
+ renamed
+ {{event|tojson(indent=4)}}
+{%- endmacro -%}
+
+{%- macro downloadFailed(event,source) -%}
+ downloadFailed
+ {{event|tojson(indent=4)}}
+{%- endmacro -%}
+
+{%- set handlers = {
+ 'downloadFolderImported': downloadFolderImported,
+ 'grabbed': grabbed,
+ 'episodeFileDeleted': episodeFileDeleted,
+ 'episodeFileRenamed': episodeFileRenamed,
+ 'movieFileDeleted': movieFileDeleted,
+ 'movieFileRenamed': movieFileRenamed,
+ 'downloadFailed': downloadFailed,
+ None: default
+} -%}
+
+{%- macro history_page(history,source) -%}
+
+ {%- for entry in history.records -%}
+ {{handlers.get(entry.eventType,handlers[None])(entry,source)}}{{'\n'}}
+ {%- endfor -%}
+
+{%- endmacro -%}
+
+{%- block app_content -%}
+History
+
+
+ {{make_tabs([('Sonarr',history_page(sonarr,'sonarr')),('Radarr',history_page(radarr,'radarr'))])}}
+
+
+{%- endblock -%}
\ No newline at end of file
diff --git a/templates/index.html b/templates/index.html
new file mode 100644
index 0000000..74c1ce8
--- /dev/null
+++ b/templates/index.html
@@ -0,0 +1,122 @@
+{% extends "base.html" %}
+
+{% macro make_row(title,items) %}
+
+ {% for item in items %}
+ {{item|safe}}
+ {% endfor %}
+
+{% endmacro %}
+
+{% macro make_tabs(tabs) %}
+
+
+
+ {% for (label,_) in tabs %}
+ {% set slug = (label|slugify) %}
+ {% if not (loop.first and loop.last) %}
+
+
+ {{label}}
+
+
+ {% endif %}
+ {% endfor %}
+
+
+
+
+
+ {% for (label,items) in tabs %}
+ {% set slug = (label|slugify) %}
+
+ {{make_row(label,items)}}
+
+ {% endfor %}
+
+{% endmacro %}
+
+{% macro upcoming(data) %}
+
+
+
+
Movies
+
+
+ Title
+ In Cinemas
+ Digital Release
+
+ {% for movie in data.calendar.movies %}
+ {% if movie.isAvailable and movie.hasFile %}
+ {% set row_class = "bg-success" %}
+ {% elif movie.isAvailable and not movie.hasFile %}
+ {% set row_class = "bg-danger" %}
+ {% elif not movie.isAvailable and movie.hasFile %}
+ {% set row_class = "bg-primary" %}
+ {% elif not movie.isAvailable and not movie.hasFile %}
+ {% set row_class = "bg-info" %}
+ {% endif %}
+
+
+
+ {{movie.title}}
+
+
+ {{movie.inCinemas|fromiso|ago_dt_utc_human(rnd=0)}}
+ {{movie.digitalRelease|fromiso|ago_dt_utc_human(rnd=0)}}
+
+ {% endfor %}
+
+
Episodes
+
+
+
+ Season | Episode Number
+ Show
+ Title
+ Air Date
+
+ {% for entry in data.calendar.episodes %}
+ {% if entry.episode.hasAired and entry.episode.hasFile %}
+ {% set row_class = "bg-success" %}
+ {% elif entry.episode.hasAired and not entry.episode.hasFile %}
+ {% set row_class = "bg-danger" %}
+ {% elif not entry.episode.hasAired and entry.episode.hasFile %}
+ {% set row_class = "bg-primary" %}
+ {% elif not entry.episode.hasAired and not entry.episode.hasFile %}
+ {% set row_class = "bg-info" %}
+ {% endif %}
+
+ {{entry.episode.seasonNumber}} | {{entry.episode.episodeNumber}}
+
+
+ {{entry.series.title}}
+
+
+ {{entry.episode.title}}
+ {{entry.episode.airDateUtc|fromiso|ago_dt_utc_human(rnd=0)}}
+
+ {% endfor %}
+
+
+
+
+{% endmacro %}
+
+{% block app_content %}
+ {% if data is none %}
+ No Data available!
+ {% else %}
+ {% set tabs = [] %}
+ {% do tabs.append(("Upcoming",[upcoming(data)])) %}
+ {% for row in data.images %}
+ {% if row[0] is string %}
+ {% set title=row[0] %}
+ {% set row=row[1:] %}
+ {% do tabs.append((title,row)) %}
+ {% endif %}
+ {% endfor %}
+ {{make_tabs(tabs)}}
+ {% endif %}
+{% endblock %}
diff --git a/templates/jellyfin/index.html b/templates/jellyfin/index.html
new file mode 100644
index 0000000..09c68d5
--- /dev/null
+++ b/templates/jellyfin/index.html
@@ -0,0 +1,121 @@
+{% extends "base.html" %}
+{% from 'utils.html' import custom_render_form_row,make_tabs %}
+{% from 'bootstrap/utils.html' import render_icon %}
+{% from 'bootstrap/form.html' import render_form, render_field, render_form_row %}
+
+{% block app_content %}
+
+Jellyfin v{{jellyfin.info.Version}}
+
+
+
+
Active Streams
+
+
+ Episode
+ Show
+ Language
+ User
+ Device
+ Mode
+
+ {% for session in jellyfin.sessions %}
+ {% if "NowPlayingItem" in session %}
+ {% with np=session.NowPlayingItem, ps=session.PlayState%}
+
+
+ {% if session.SupportsMediaControl %}
+
+ {{render_icon("stop-circle")}}
+
+ {% endif %}
+
+ {{np.Name}}
+
+ ({{(ps.PositionTicks/10_000_000)|timedelta(digits=0)}}/{{(np.RunTimeTicks/10_000_000)|timedelta(digits=0)}})
+ {% if ps.IsPaused %}
+ (Paused)
+ {% endif %}
+
+
+
+ {{np.SeriesName}}
+
+
+ ({{np.SeasonName}})
+
+
+
+
+ {% if ("AudioStreamIndex" in ps) and ("SubtitleStreamIndex" in ps) %}
+ {{np.MediaStreams[ps.AudioStreamIndex].Language or "None"}}/{{np.MediaStreams[ps.SubtitleStreamIndex].Language or "None"}}
+ {% else %}
+ Unk/Unk
+ {% endif %}
+
+
+
+ {{session.UserName}}
+
+
+
+ {{session.DeviceName}}
+
+
+ {% if ps.PlayMethod =="Transcode" %}
+
+ {{ps.PlayMethod}}
+
+ {% else %}
+
+ {{ps.PlayMethod}}
+
+ {% endif %}
+
+
+ {% endwith %}
+ {% endif %}
+ {% endfor %}
+
+
+
+
+
+
+
Users
+
+
+ Name
+ Last Login
+ Last Active
+ Bandwidth Limit
+
+ {% for user in jellyfin.users|sort(attribute="LastLoginDate",reverse=True) %}
+
+
+
+ {{user.Name}}
+
+
+
+ {% if "LastLoginDate" in user %}
+ {{user.LastLoginDate|fromiso|ago_dt_utc(2)}} ago
+ {% else %}
+ Never
+ {% endif %}
+
+
+ {% if "LastActivityDate" in user %}
+ {{user.LastActivityDate|fromiso|ago_dt_utc(2)}} ago
+ {% else %}
+ Never
+ {% endif %}
+
+ {{user.Policy.RemoteClientBitrateLimit|filesizeformat(binary=False)}}/s
+
+ {% endfor %}
+
+
+
+
+{% endblock %}
\ No newline at end of file
diff --git a/templates/logs.html b/templates/logs.html
new file mode 100644
index 0000000..081c1c2
--- /dev/null
+++ b/templates/logs.html
@@ -0,0 +1,39 @@
+{% extends "base.html" %}
+
+{% block app_content %}
+
+
QBittorrent
+
+ {% set t_first = logs.qbt[0].timestamp %}
+ {% for message in logs.qbt if "WebAPI login success" not in message.message %}
+ {%set type={1: 'status' , 2: 'info', 4: 'warning', 8:'danger'}.get(message.type,none) %}
+ {%set type_name={1: 'NORMAL' , 2: 'INFO', 4: 'WARNING', 8:'CRITICAL'}.get(message.type,none) %}
+
+ [{{((message.timestamp-t_first)/1000) | timedelta}}|{{type_name}}] {{message.message.strip()}}
+
+ {% endfor %}
+
+
+
Sonarr
+
+ {% set t_first = (logs.sonarr.records[0].time)|fromiso %}
+ {% for message in logs.sonarr.records %}
+ {%set type={'warn': 'warning', 'error':'danger'}.get(message.level,message.level) %}
+
+ [{{message.time | fromiso | ago_dt}}|{{message.logger}}|{{message.level|upper}}] {{message.message.strip()}}
+
+ {% endfor %}
+
+
+
Radarr
+
+ {% set t_first = (logs.radarr.records[0].time)|fromiso %}
+ {% for message in logs.radarr.records %}
+ {%set type={'warn': 'warning', 8:'danger'}.get(message.level,message.level) %}
+
+ [{{message.time | fromiso | ago_dt}}|{{message.logger}}|{{message.level|upper}}] {{message.message.strip()}}
+
+ {% endfor %}
+
+
+{% endblock %}
\ No newline at end of file
diff --git a/templates/qbittorrent/details.html b/templates/qbittorrent/details.html
new file mode 100644
index 0000000..c9b5b7d
--- /dev/null
+++ b/templates/qbittorrent/details.html
@@ -0,0 +1,236 @@
+{% extends "base.html" %}
+{% from "utils.html" import render_tree %}
+
+
+{% block scripts %}
+{{super()}}
+
+{% endblock %}
+
+{% block app_content %}
+
+
+
+
+
+
+
+ {{(qbt.info.progress*100)|round(2)}} %
+
+
+
+
+
+
+
+
+
+
+ {{qbt.info.state[0]}}
+
+ {% if qbt.info.category %}
+ {{qbt.info.category}}
+ {% endif %}
+
+
+
+Info
+
+
+
+ Total Size
+
+
+ {{qbt.info.size|filesizeformat(binary=True)}} ({{[0,qbt.info.size-qbt.info.downloaded]|max|filesizeformat(binary=True)}} left)
+
+
+ Files
+
+
+ {{qbt.files|count}}
+
+
+
+
+
+
+ Downloaded
+
+
+ {{qbt.info.downloaded|filesizeformat(binary=True)}} ({{qbt.info.dlspeed|filesizeformat(binary=True)}}/s)
+
+
+ Uploaded
+
+
+ {{qbt.info.uploaded|filesizeformat(binary=True)}} ({{qbt.info.upspeed|filesizeformat(binary=True)}}/s)
+
+
+
+Health
+
+
+
+ Last Active
+
+
+ {{qbt.info.last_activity|ago(clamp=True)}} Ago
+
+
+ Age
+
+
+ {{qbt.info.added_on|ago}}
+
+
+
+
+
+
+
+ Avg. DL rate
+
+
+ {{(qbt.info.downloaded/((qbt.info.added_on|ago).total_seconds()))|filesizeformat(binary=True)}}/s
+ (A: {{(qbt.info.downloaded/qbt.info.time_active)|filesizeformat(binary=True)}}/s)
+
+
+ Avg. UL rate
+
+
+ {{(qbt.info.uploaded/((qbt.info.added_on|ago).total_seconds()))|filesizeformat(binary=True)}}/s
+ (A: {{(qbt.info.uploaded/qbt.info.time_active)|filesizeformat(binary=True)}}/s)
+
+
+
+
+
+
+ ETC (DL rate while active)
+
+
+
+ {% set dl_rate_act = (qbt.info.downloaded/qbt.info.time_active) %}
+ {% if dl_rate_act>0 %}
+ {{((qbt.info.size-qbt.info.downloaded)/dl_rate_act)|round(0)|timedelta(clamp=true)}}
+ {% else %}
+ N/A
+ {% endif %}
+
+
+
+ ETC (avg. DL rate)
+
+
+ {% set dl_rate = (qbt.info.downloaded/((qbt.info.added_on|ago(clamp=True)).total_seconds())) %}
+ {% if dl_rate>0 %}
+ {{((qbt.info.size-qbt.info.downloaded)/dl_rate)|round(0)|timedelta(clamp=true)}}
+ {% else %}
+ N/A
+ {% endif %}
+
+
+
+
+
+
+ Total active time
+
+
+ {{qbt.info.time_active|timedelta}}
+
+
+
+ Availability
+
+
+ {% if qbt.info.availability==-1 %}
+ N/A
+ {% else %}
+ {{(qbt.info.availability*100)|round(2)}} %
+ {% endif %}
+
+
+
+Swarm
+
+
+
+ Seeds
+
+
+ {{qbt.info.num_seeds}}
+
+
+ Leechers
+
+
+ {{qbt.info.num_leechs}}
+
+
+
+
+
+
+ Last seen completed
+
+
+ {{qbt.info.seen_complete|ago}} Ago
+
+
+
+
+
+
+Files
+
+{{render_tree(qbt.files|sort(attribute='name')|list|make_tree)}}
+
+
+
+{% for tracker in qbt.trackers|sort(attribute='total_peers', reverse=true) %}
+
+
+ {% if tracker.has_url %}
+
{{tracker.name}}
+ {% else %}
+ {{tracker.name}}
+ {% endif %}
+ {% if tracker.message %}
+
{{tracker.message}}
+ {% endif %}
+
+
+ {{tracker.status[0]}}
+ (S: {{tracker.num_seeds[1]}}, L: {{tracker.num_leeches[1]}}, P: {{tracker.num_peers[1]}}, D: {{tracker.num_downloaded[1]}})
+
+
+{% endfor %}
+
+{% endblock %}
\ No newline at end of file
diff --git a/templates/qbittorrent/index.html b/templates/qbittorrent/index.html
new file mode 100644
index 0000000..b67595b
--- /dev/null
+++ b/templates/qbittorrent/index.html
@@ -0,0 +1,138 @@
+{% extends "base.html" %}
+
+{% macro torrent_entry(torrent) %}
+ {% set state_label,badge_type = status_map[torrent.state] or (torrent.state,'light') %}
+
+
+ {{torrent.name|truncate(75)}}
+ (DL: {{torrent.dlspeed|filesizeformat(binary=true)}}/s, UL: {{torrent.upspeed|filesizeformat(binary=true)}}/s)
+ {{state_label}}
+ {% if torrent.category %}
+ {{torrent.category}}
+ {% endif %}
+
+
+
+
+
+
+
{{(torrent.progress*100)|round(2)}} % (ETA: {{[torrent.eta,torrent.eta_act]|min|round(0)|timedelta(clamp=true)}})
+
+
+{% endmacro %}
+
+{% block app_content %}
+
+
+ QBittorrent
+ {{qbt.version}}
+ (DL: {{qbt.server_state.dl_info_speed|filesizeformat(binary=True)}}/s,
+ UL: {{qbt.server_state.up_info_speed|filesizeformat(binary=True)}}/s)
+
+
+
+
+ Total Uploaded
+
+
+ {{qbt.server_state.alltime_ul|filesizeformat(binary=True)}}
+
+
+ Total Downloaded
+
+
+ {{qbt.server_state.alltime_dl|filesizeformat(binary=True)}}
+
+
+
+
+
+
+ Session Uploaded
+
+
+ {{qbt.server_state.up_info_data|filesizeformat(binary=True)}}
+
+
+ Session Downloaded
+
+
+ {{qbt.server_state.dl_info_data|filesizeformat(binary=True)}}
+
+
+
+
+
+ Torrents
+
+
+ {{qbt.torrents|length}}
+
+
+ Total Queue Size
+
+
+ {{qbt.torrents.values()|map(attribute='size')|sum|filesizeformat(binary=true)}}
+
+
+
+
+
+
+
+{% for state,torrents in qbt.torrents.values()|sort(attribute='state')|groupby('state') %}
+ {% set state_label,badge_type = status_map[state] or (state,'light') %}
+
+
+
+ {{torrents|length}}
+
+
+{% endfor %}
+
+{% if state_filter %}
+
+{% endif %}
+
+
+
+
+
+
+ {% for torrent in qbt.torrents.values()|sort(attribute=sort_by,reverse=True) %}
+ {% set state_label,badge_type = status_map[torrent.state] or (torrent.state,'light') %}
+ {% if state_filter %}
+ {% if torrent.state==state_filter %}
+ {{torrent_entry(torrent)}}
+ {% endif %}
+ {% else %}
+ {{torrent_entry(torrent)}}
+ {% endif %}
+ {% endfor %}
+
+
+
+{% endblock %}
diff --git a/templates/radarr/details.html b/templates/radarr/details.html
new file mode 100644
index 0000000..e69de29
diff --git a/templates/radarr/index.html b/templates/radarr/index.html
new file mode 100644
index 0000000..ed0d839
--- /dev/null
+++ b/templates/radarr/index.html
@@ -0,0 +1,28 @@
+{% extends "base.html" %}
+{% from 'utils.html' import make_tabs %}
+
+{% macro movie_list() %}
+ {% for movie in movies|sort(attribute='sortTitle') %}
+
+ {{movie.title}}
+ ({{movie.year}})
+ {% for genre in movie.genres %}
+ {{genre}}
+ {% endfor %}
+ {{movie.status|title}}
+
+ {% endfor %}
+{% endmacro %}
+
+{% block app_content %}
+
+ Radarr
+ v{{status.version}} ({{movies|count}} Movies)
+
+
+
+{% endblock %}
diff --git a/templates/remote/add.html b/templates/remote/add.html
new file mode 100644
index 0000000..6149ff0
--- /dev/null
+++ b/templates/remote/add.html
@@ -0,0 +1,23 @@
+{% extends "base.html" %}
+{% from 'utils.html' import custom_render_form_row,make_tabs %}
+{% from 'bootstrap/form.html' import render_form, render_field, render_form_row %}
+
+{% block app_content %}
+
+{% if form %}
+Grant remote access
+{% endif %}
+
+
+
+
+{% endblock %}
\ No newline at end of file
diff --git a/templates/remote/index.html b/templates/remote/index.html
new file mode 100644
index 0000000..ef46980
--- /dev/null
+++ b/templates/remote/index.html
@@ -0,0 +1,78 @@
+{% extends "base.html" %}
+{% from 'utils.html' import custom_render_form_row,make_tabs %}
+{% from 'bootstrap/utils.html' import render_icon %}
+{% from 'bootstrap/form.html' import render_form, render_field, render_form_row %}
+
+{% block app_content %}
+
+
+
+
+
+
+
+
+
+
+ Name
+ Last Login
+ Last Active
+ Bandwidth Limit
+
+ {% for user in jf|sort(attribute="LastLoginDate",reverse=True) %}
+
+
+
+ {{user.Name}}
+
+
+
+ {% if "LastLoginDate" in user %}
+ {{user.LastLoginDate|fromiso|ago_dt_utc(2)}} ago
+ {% else %}
+ Never
+ {% endif %}
+
+
+ {% if "LastActivityDate" in user %}
+ {{user.LastActivityDate|fromiso|ago_dt_utc(2)}} ago
+ {% else %}
+ Never
+ {% endif %}
+
+ {{user.Policy.RemoteClientBitrateLimit|filesizeformat(binary=False)}}/s
+
+ {% endfor %}
+
+
+
+
+{% endblock %}
\ No newline at end of file
diff --git a/templates/search/details.html b/templates/search/details.html
new file mode 100644
index 0000000..6581834
--- /dev/null
+++ b/templates/search/details.html
@@ -0,0 +1,10 @@
+{% extends "base.html" %}
+
+{% block app_content %}
+{{info.title}} ({{info.year}})
+{{info.hasFile}}
+{{info.id}}
+
+ {{info|tojson(indent=4)}}
+
+{% endblock %}
\ No newline at end of file
diff --git a/templates/search/include/movie.html b/templates/search/include/movie.html
new file mode 100644
index 0000000..c86d6d5
--- /dev/null
+++ b/templates/search/include/movie.html
@@ -0,0 +1,18 @@
+{% macro movie_results(results) -%}
+
+ {%for result in results %}
+
+ {% endfor %}
+
+{% endmacro %}
diff --git a/templates/search/include/torrent.html b/templates/search/include/torrent.html
new file mode 100644
index 0000000..0e3671f
--- /dev/null
+++ b/templates/search/include/torrent.html
@@ -0,0 +1,123 @@
+
+{% macro torrent_result_row(result,with_tracker=false) -%}
+
+
+
+
+
+
+
+
+
+ {{result.Title}}
+
+ {% if result.DownloadVolumeFactor==0.0 %}
+ Freeleech
+ {% endif %}
+ {% if result.UploadVolumeFactor > 1.0 %}
+ UL x{{result.UploadVolumeFactor}}
+ {% endif %}
+
+
+
+
+ {{result.CategoryDesc}}
+
+
+ {{result.Size|filesizeformat}}
+
+ {% if with_tracker %}
+
+
+ {{result.Tracker}}
+
+
+ {% endif %}
+
+ ({{result.Seeders}}/{{result.Peers}}/{{ "?" if result.Grabs is none else result.Grabs}})
+
+
+{% endmacro %}
+
+{% macro torrent_result_grouped(results) %}
+{% if results %}
+
+ {% for tracker,results in results.Results|groupby(attribute="Tracker") %}
+
+
+
+ {{tracker}} ({{results|length}})
+
+
+
+
+ Name
+
+
+
+
+ Category
+
+
+ Size
+
+
+ Seeds/Peers/Grabs
+
+
+
+ {%for result in results|sort(attribute='Gain',reverse=true) %}
+ {{ torrent_result_row(result,with_tracker=false) }}
+ {% endfor %}
+ {% endfor %}
+
+
+
+
+
+
+{% endif %}
+{% endmacro %}
+
+
+{% macro torrent_results(results,group_by_tracker=false) %}
+
+{% endmacro %}
diff --git a/templates/search/include/tv_show.html b/templates/search/include/tv_show.html
new file mode 100644
index 0000000..2ea7e38
--- /dev/null
+++ b/templates/search/include/tv_show.html
@@ -0,0 +1,23 @@
+
+{% macro tv_show_results(results) -%}
+
+ {% for result in results %}
+
+ {% endfor %}
+
+{% endmacro %}
diff --git a/templates/search/index.html b/templates/search/index.html
new file mode 100644
index 0000000..5cd8a0d
--- /dev/null
+++ b/templates/search/index.html
@@ -0,0 +1,64 @@
+{% extends "base.html" %}
+{% from "utils.html" import make_tabs, custom_render_form_row %}
+{% from 'bootstrap/form.html' import render_form, render_field, render_form_row %}
+{% from 'bootstrap/table.html' import render_table %}
+{% from 'search/include/tv_show.html' import tv_show_results with context %}
+{% from 'search/include/movie.html' import movie_results with context %}
+{% from 'search/include/torrent.html' import torrent_results with context %}
+{% block styles %}
+{{super()}}
+
+{% endblock %}
+
+{% block app_content %}
+
+{% if form %}
+Search
+{% endif %}
+
+
+
+ {% if session.new_torrents %}
+
+ {% for torrent in session.pop('new_torrents',{}).values() %}
+
+ Added {{torrent.name}}
+
+ {% endfor %}
+
+ {% endif %}
+ {% if form %}
+
+ {% else %}
+
Search results for '{{search_term}}'
+ {% endif %}
+
+
+
+{% set search_results = [
+ (results.tv_shows,"tv","TV Shows",tv_show_results,{}),
+ (results.movies,"movie","Movies",movie_results,{}),
+ (results.torrents,"torrent","Torrents",torrent_results,{"group_by_tracker":group_by_tracker}),
+] %}
+
+{% if results %}
+ {% set tabs = [] %}
+ {% for results,id_name,label,func,kwargs in search_results if results %}
+ {% do tabs.append((label,func(results,**kwargs))) %}
+ {% endfor %}
+ {{make_tabs(tabs)}}
+{% endif %}
+
+{% endblock %}
\ No newline at end of file
diff --git a/templates/sonarr/details.html b/templates/sonarr/details.html
new file mode 100644
index 0000000..e69de29
diff --git a/templates/sonarr/index.html b/templates/sonarr/index.html
new file mode 100644
index 0000000..c97bc08
--- /dev/null
+++ b/templates/sonarr/index.html
@@ -0,0 +1,28 @@
+{% extends "base.html" %}
+{% from 'utils.html' import make_tabs %}
+
+{% macro series_list() %}
+ {% for show in series|sort(attribute='sortTitle') %}
+
+ {{show.title}}
+ ({{show.year}})
+ {% for genre in show.genres %}
+ {{genre}}
+ {% endfor %}
+ {{show.status|title}}
+
+ {% endfor %}
+{% endmacro %}
+
+{% block app_content %}
+
+ Sonarr
+ v{{status.version}} ({{series|count}} Shows)
+
+
+
+
+ {{series_list()}}
+
+
+{% endblock %}
\ No newline at end of file
diff --git a/templates/test.html b/templates/test.html
new file mode 100644
index 0000000..0aa44b8
--- /dev/null
+++ b/templates/test.html
@@ -0,0 +1,43 @@
+{% extends "base.html" %}
+{% from 'bootstrap/form.html' import render_form %}
+
+{% block scripts %}
+{{super()}}
+
+{% endblock %}
+
+
+{% block app_content__ %}
+
+ {{render_form(form)}}
+
+{% endblock %}
+
+
+
+{% block app_content %}
+ {% for i in range(100) %}
+
+ {% endfor %}
+{% endblock %}
\ No newline at end of file
diff --git a/templates/transcode/profiles.html b/templates/transcode/profiles.html
new file mode 100644
index 0000000..8af0bbd
--- /dev/null
+++ b/templates/transcode/profiles.html
@@ -0,0 +1,30 @@
+{% extends "base.html" %}
+{% from 'utils.html' import make_tabs %}
+{% from 'bootstrap/form.html' import render_form, render_field, render_form_row %}
+
+{% macro profile_list() %}
+ {% for name, cfg in config.APP_CONFIG.transcode_profiles.items() %}
+ {{name}}
+ {{cfg.doc}}
+ ffmpeg -i <infile> {{cfg.command}} <outfile>
+ {% if cfg.vars %}
+ {% for var,doc in cfg.vars.items() %}
+
+
{{var}}
+ ({{doc}}{% if cfg.defaults[var] %}, Default: {{cfg.defaults[var]}} {% endif %})
+ {% endfor %}
+ {% endif %}
+
+ {% endfor %}
+{% endmacro %}
+
+{% block app_content %}
+
+
+
+
Transcode profiles
+ {{profile_list()}}
+
+
+
+{% endblock %}
\ No newline at end of file
diff --git a/templates/utils.html b/templates/utils.html
new file mode 100644
index 0000000..3e905ca
--- /dev/null
+++ b/templates/utils.html
@@ -0,0 +1,85 @@
+
+{% from 'bootstrap/form.html' import render_field %}
+
+{% macro custom_render_form_row(fields, row_class='form-row', col_class_default='col', col_map={}, button_map={}, button_style='', button_size='', render_args={}) %}
+
+ {% for field in fields %}
+ {% if field.name in col_map %}
+ {% set col_class = col_map[field.name] %}
+ {% else %}
+ {% set col_class = col_class_default %}
+ {% endif %}
+
+ {{ render_field(field, button_map=button_map, button_style=button_style, button_size=button_size, **render_args) }}
+
+ {% endfor %}
+
+{% endmacro %}
+
+{% macro make_tabs(tabs)%}
+ {% set tabs_id = tabs|tojson|hash %}
+
+
+
+ {% for label,tab in tabs if tab %}
+ {% set id_name = [loop.index,tabs_id ]|join("-") %}
+ {% if not (loop.first and loop.last) %}
+
+
+ {{label}}
+
+
+ {% endif %}
+ {% endfor %}
+
+
+
+
+
+
+ {% for label,tab in tabs if tab %}
+ {% set id_name = [loop.index,tabs_id ]|join("-") %}
+
+ {{ tab|safe }}
+
+ {% endfor %}
+
+
+
+{% endmacro %}
+
+{% macro render_tree(tree) -%}
+
+ {% for node,children in tree.items() recursive %}
+ {% if node=="__info__" or not children is mapping -%}
+ {% set file = children %}
+
+
+
+
+ {{(file.progress*100)|round(2)}} % ({{file.size|filesizeformat(binary=True)}})
+
+
+
+
+
+
+ {% else -%}
+
+
+ {{node}}
+
+ {% if children.items() -%}
+
+ {{loop(children.items())}}
+
+ {% endif %}
+
+ {% endif %}
+ {% endfor %}
+
+{% endmacro %}
\ No newline at end of file
diff --git a/transcode.py b/transcode.py
new file mode 100644
index 0000000..1894ff1
--- /dev/null
+++ b/transcode.py
@@ -0,0 +1,143 @@
+import subprocess as SP
+import json
+import shlex
+import time
+import os
+import io
+import sys
+import uuid
+from tqdm import tqdm
+from utils import handle_config
+
# Transcode profiles are loaded from config.json via utils.handle_config().
# NOTE(review): assumes handle_config() returns a dict — verify it cannot
# return None when config.json is absent.
profiles = handle_config().get("transcode_profiles", {})

# Sentinel profile under key None: stream-copies everything into the null
# muxer so ffmpeg decodes (and therefore counts) frames without writing output.
profiles[None] = {
    "command": "-vcodec copy -acodec copy -scodec copy -f null",
    "doc": "null output for counting frames",
}
+
+
def ffprobe(file):
    """Probe *file* with ffprobe and return ``(file, info)``.

    ``info`` is the parsed JSON dict (container format + streams), or
    ``None`` when ffprobe fails, the file is unreadable, or the ffprobe
    binary is not installed.
    """
    cmd = [
        "ffprobe",
        "-v",
        "error",
        "-print_format",
        "json",
        "-show_format",
        "-show_streams",
        file,
    ]
    try:
        out = SP.check_output(cmd)
    except Exception:
        # Narrowed from a bare ``except:`` so KeyboardInterrupt/SystemExit
        # propagate naturally (the explicit re-raise clause is no longer
        # needed: they are BaseExceptions, not Exceptions).
        return file, None
    return file, json.loads(out)
+
+
def make_ffmpeg_command_line(infile, outfile, profile=None, **kwargs):
    """Build the ffmpeg argv for transcoding *infile* to *outfile*.

    profile: profile dict with a ``command`` template (string or list of
        args) and an optional ``defaults`` mapping for its ``{placeholders}``.
    kwargs: per-call overrides for the profile's placeholder values.

    The ffmpeg binary defaults to ``ffmpeg`` on PATH and can be overridden
    via the ``FFMPEG_BINARY`` environment variable (this replaces the
    previous hard-coded per-user Windows path, which broke on any other
    machine).  NOTE: every argument — including the file names — is passed
    through ``str.format``, so literal ``{``/``}`` in paths must be doubled.
    """
    default_opts = ["-v", "error", "-y", "-nostdin"]
    ffmpeg = os.environ.get("FFMPEG_BINARY", "ffmpeg")
    cmdline = profile["command"]
    opts = profile.get("defaults", {}).copy()
    opts.update(kwargs)

    if isinstance(cmdline, str):
        cmdline = shlex.split(cmdline)
    cmdline = list(cmdline or [])
    # Emit machine-readable progress on stdout for run_transcode() to parse.
    cmdline += ["-progress", "-", "-nostats"]
    ret = [ffmpeg, *default_opts, "-i", infile, *cmdline, outfile]
    ret = [v.format(**opts) for v in ret]
    return ret
+
+
def count_frames(file, **kwargs):
    """Count total frames in *file* by running a null (no-output) transcode.

    Drives run_transcode() against os.devnull with the sentinel None
    profile and reads the final ``frame`` counter from the last
    ``progress=end`` state.  Returns None when no frame count could be
    determined.  kwargs are accepted for interface compatibility but unused.
    """
    frames = None
    for state in run_transcode(file, os.devnull, None):
        if state.get("progress") == "end":
            frames = int(state.get("frame", -1))
    if frames is None:
        return None
    # A non-positive count means ffmpeg never reported a frame number.
    return frames if frames > 0 else None
+
+
def run_transcode(file, outfile, profile, job_id=None, **kwargs):
    """Run ffmpeg for *file* -> *outfile* using the named transcode *profile*.

    Generator: yields the accumulated ffmpeg ``-progress`` key/value state
    dict each time a ``progress=...`` line is parsed, plus one final yield
    after the process exits.  ``state["ret"]`` holds the last poll() result
    and, for real outputs, ``state["stderr"]`` mirrors the ffmpeg log file.

    job_id: name stem for the temporary ``<job_id>.log`` stderr capture;
        a random UUID is generated when not supplied.
    kwargs: placeholder overrides forwarded to the profile command template.
    """
    job_id = job_id or str(uuid.uuid4())
    stderr_fh = None
    # Only capture stderr to a log file for real transcodes; the null
    # frame-counting run (outfile == os.devnull) skips logging entirely.
    if outfile != os.devnull:
        stderr_fh = open("{}.log".format(job_id), "w")
    proc = SP.Popen(
        make_ffmpeg_command_line(file, outfile, profiles[profile], **kwargs),
        stdout=SP.PIPE,
        stderr=stderr_fh,
        encoding="utf8",
    )
    state = {}
    poll = None
    while poll is None:
        poll = proc.poll()
        state["ret"] = poll
        if outfile != os.devnull:
            # Re-read the entire log each iteration so consumers always see
            # the latest ffmpeg stderr.  NOTE(review): O(log size) per
            # progress line — fine for short jobs, reconsider for long ones.
            with open("{}.log".format(job_id), "r") as tl:
                state["stderr"] = tl.read()
        line = proc.stdout.readline().strip()
        if not line:
            continue
        try:
            key, val = line.split("=", 1)
        except ValueError:
            # Unparseable progress line; surface it for debugging and move on.
            print(line)
            continue
        key = key.strip()
        val = val.strip()
        state[key] = val
        # ffmpeg emits "progress=continue|end" as the final key of each
        # status block — use it as the yield boundary.
        if key == "progress":
            yield state
    if stderr_fh:
        # Clean up the temporary stderr log once the process has exited.
        stderr_fh.close()
        os.unlink(stderr_fh.name)
    yield state
+
+
def transcode(file, outfile, profile, job_id=None, **kwargs):
    """Transcode *file* to *outfile* with *profile*, yielding progress states.

    Each yielded state dict is the ffmpeg progress state from
    run_transcode() augmented with ``total_frames``, ``file`` and
    ``outfile`` keys.  A tqdm bar tracks frame progress on the console.

    Improvements over the original: removed the unused
    ``from pprint import pprint`` and the dead ``info = ffprobe(file)``
    assignment, which spawned a pointless ffprobe subprocess per call and
    discarded the result.
    """
    # First pass: null transcode to learn the total frame count
    # (may legitimately be None when ffmpeg cannot report it).
    frames = count_frames(file)
    progbar = tqdm(
        desc="Processing {}".format(outfile),
        total=frames,
        unit=" frames",
        disable=False,
        leave=False,
    )
    for state in run_transcode(file, outfile, profile, job_id, **kwargs):
        if "frame" in state:
            # Set the absolute position, then refresh with a zero-delta update.
            progbar.n = int(state["frame"])
            progbar.update(0)
        state["total_frames"] = frames
        state["file"] = file
        state["outfile"] = outfile
        yield state
    progbar.close()
+
+
def preview_command(file, outfile, profile, **kwargs):
    """Build (without executing) the ffmpeg argv for the named *profile*."""
    selected = profiles[profile]
    return make_ffmpeg_command_line(file, outfile, selected, **kwargs)
+
+
+if __name__ == "__main__":
+ file = sys.argv[1]
+ for profile in ["H.265 transcode", "H.264 transcode"]:
+ for preset in ["ultrafast", "fast", "medium", "slow", "veryslow"]:
+ for crf in list(range(10, 54, 4))[::-1]:
+ outfile = os.path.join("E:\\","transcode",profile,"{}_{}.mkv".format(crf, preset))
+ os.makedirs(os.path.dirname(outfile), exist_ok=True)
+ if os.path.isfile(outfile):
+ print("Skipping",outfile)
+ continue
+ for _ in transcode(
+ file, outfile, profile, "transcode", preset=preset, crf=crf
+ ):
+ pass
diff --git a/utils.py b/utils.py
new file mode 100644
index 0000000..788ea65
--- /dev/null
+++ b/utils.py
@@ -0,0 +1,196 @@
+from flask_nav.renderers import Renderer, SimpleRenderer
+from dominate import tags
+import asteval
+import operator as op
+import textwrap
+import math
+import sys
+import random
+import string
+from functools import wraps
+from urllib.request import urlopen
+from io import BytesIO
+import subprocess as SP
+import shlex
+import json
+import os
+
+from PIL import Image
+from PIL import ImageFont
+from PIL import ImageDraw
+
+
def handle_config(cfg=None):
    """Load or persist ``config.json`` in the current working directory.

    Called with no argument: return the parsed config.json contents, or an
    empty dict when the file does not exist.  (The original wrote a literal
    ``null`` into config.json and returned None in that case, crashing
    callers that chain ``handle_config().get(...)``.)

    Called with *cfg*: write it to config.json (pretty-printed) and return
    it, instead of returning ``json.dump``'s None.
    """
    if cfg is None:
        if os.path.isfile("config.json"):
            with open("config.json") as fh:
                return json.load(fh)
        return {}
    with open("config.json", "w") as fh:
        json.dump(cfg, fh, indent=4)
    return cfg
+
+
def with_application_context(app):
    """Decorator factory: run the wrapped callable inside *app*'s
    application context (``with app.app_context():``).

    Preserves the wrapped function's metadata via functools.wraps.
    """

    def decorator(func):
        @wraps(func)
        def inside_context(*args, **kwargs):
            with app.app_context():
                return func(*args, **kwargs)

        return inside_context

    return decorator
+
+
def getsize(text, font_size):
    """Measure *text* rendered in Arial at *font_size*; returns (width, height).

    NOTE(review): ``getsize_multiline`` was removed in Pillow 10 — confirm
    the pinned Pillow version, or migrate to ``ImageDraw.multiline_textbbox``.
    """
    arial = ImageFont.truetype("arial.ttf", font_size)
    return arial.getsize_multiline(text)
+
+
def does_text_fit(text, width, height, font_size):
    """Return True when *text* at *font_size* fits strictly inside a
    width x height box."""
    text_w, text_h = getsize(text, font_size)
    return (text_w < width) and (text_h < height)
+
+
def make_placeholder_image(text, width, height, poster=None, wrap=0):
    """Render *text* centered on a width x height dark placeholder image.

    Optionally word-wraps the text to *wrap* columns and underlays a
    *poster* image (fetched from a URL) scaled to the full width.
    Returns a BytesIO positioned at offset 0 containing the PNG bytes.
    """
    width = int(width)
    height = int(height)
    wrap = int(wrap)
    font_size = 1
    bounds = (0, 1)
    if wrap:
        text = textwrap.fill(text, wrap)
    # Phase 1: exponentially grow the upper bound until the text overflows.
    while True:
        if not does_text_fit(text, width, height, bounds[1]):
            break
        bounds = (bounds[1], bounds[1] * 2)
    prev_bounds = None
    # Phase 2: binary-search between the bounds for the largest fitting
    # font size; stop when the interval no longer shrinks.
    while True:
        if does_text_fit(text, width, height, bounds[1]):
            bounds = (int(round(sum(bounds) / 2, 0)), bounds[1])
        else:
            bounds = (bounds[0], int(round(sum(bounds) / 2, 0)))
        if prev_bounds == bounds:
            break
        prev_bounds = bounds
    font_size = bounds[0]
    io = BytesIO()  # NOTE(review): local name shadows the stdlib module name
    im = Image.new("RGBA", (width, height), "#222")
    draw = ImageDraw.Draw(im)
    font = ImageFont.truetype("arial.ttf", font_size)
    w, h = getsize(text, font_size)
    if poster:
        try:
            with urlopen(poster) as fh:
                poster = Image.open(fh)
        except Exception as e:
            # Best-effort: fall back to a plain text placeholder when the
            # poster cannot be fetched or decoded.
            poster = None
        else:
            poster_size = poster.size
            # Scale the poster to fill the full width, preserving aspect ratio.
            factor = width / poster_size[0]
            new_size = (
                math.ceil(poster_size[0] * factor),
                math.ceil(poster_size[1] * factor),
            )
            poster = poster.resize(new_size)
            # Vertically center the (possibly taller) poster on the canvas.
            mid = -int((poster.size[1] - height) / 2)
            im.paste(poster, (0, mid))
    draw.text(((width - w) / 2, (height - h) / 2), text, fill="#eee", font=font)
    im.save(io, "PNG")
    io.seek(0)
    return io
+
+
def make_tree(files, child_key="children"):
    """Build a nested dict tree from flat file records.

    Each record's ``name`` is split on ``/``; intermediate parts become
    nested dicts and the final part maps to ``{"__info__": record}``.
    ``child_key`` is accepted for interface compatibility but unused.
    """
    tree = {}
    for entry in files:
        *dirs, leaf = entry["name"].split("/")
        node = tree
        for part in dirs:
            node = node.setdefault(part, {})
        node[leaf] = {"__info__": entry}
    return tree
+
+
class BootsrapRenderer(Renderer):
    """flask-nav renderer emitting Bootstrap 4 navbar markup via dominate.

    NOTE(review): the class name misspells "Bootstrap"; kept as-is because
    external code may reference it by this exact name.
    """

    def visit_Navbar(self, node):
        # Render the navbar as a <ul class="navbar-nav"> of visited items.
        sub = []
        for item in node.items:
            sub.append(self.visit(item))
        ret = tags.ul(sub, cls="navbar-nav mr-auto")
        return ret

    def visit_View(self, node):
        # A single nav link; "active" class is added for the current view.
        classes = ["nav-link"]
        if node.active:
            classes.append("active")
        return tags.li(
            tags.a(node.text, href=node.get_url(), cls=" ".join(classes)),
            cls="nav-item",
        )

    def visit_Subgroup(self, node):
        # Renders a dropdown: the subgroup's FIRST item becomes the visible
        # top-level link, the remaining items populate the dropdown menu.
        url = "#"
        classes = []
        child_active = False
        if node.title == "":
            # Untitled subgroup: promote the first child to act as the header.
            active = False  # NOTE(review): assigned but never read afterwards
            for item in node.items:
                if item.active:
                    classes.append("active")
                    break
            node, *children = node.items
            # Track whether any remaining child is active so the caret
            # toggle can be highlighted too.
            for c in children:
                if c.active:
                    child_active = True
                    break
            node.items = children
            node.title = node.text
            url = node.get_url()
        dropdown = tags.ul(
            [
                tags.li(
                    tags.a(
                        item.text,
                        href=item.get_url(),
                        cls="nav-link active" if item.active else "nav-link",
                        style="",
                    ),
                    cls="nav-item",
                )
                for item in node.items
            ],
            cls="dropdown-menu ",
        )
        link = tags.a(
            node.title,
            href=url,
            cls="nav-link active" if node.active else "nav-link",
            style="",
        )
        toggle = tags.a(
            [],
            cls="dropdown-toggle nav-link active"
            if child_active
            else "dropdown-toggle nav-link",
            data_toggle="dropdown",
            href="#",
            style="padding-left: 0px; padding-top: 10px",
        )
        # almost the same as visit_Navbar, but written a bit more concise
        return [link, tags.li([toggle, dropdown], cls="dropdown nav-item")]
+
+
def eval_expr(expr, ctx=None):
    """Evaluate the expression string *expr* with a minimal (sandboxed)
    asteval interpreter, using *ctx* as the symbol table."""
    interpreter = asteval.Interpreter(minimal=True, use_numpy=False, symtable=ctx)
    return interpreter(expr)
+
+
def sort_by(values, expr):
    """Sort *values* by evaluating *expr* against each item as its symbol table.

    Fixes a NameError in the original, which referenced the undefined name
    ``value`` instead of the parameter ``values``.
    """
    return sorted(values, key=lambda item: eval_expr(expr, item))
+
def genpw(num=20):
    """Generate a *num*-character random alphanumeric password.

    Uses the ``secrets`` module rather than ``random``: passwords are
    security-sensitive and require a cryptographically secure RNG.
    The alphabet (ASCII letters + digits) is unchanged from the original.
    """
    import secrets  # local import: keeps the module's import block untouched

    alphabet = string.ascii_lowercase + string.ascii_uppercase + string.digits
    return "".join(secrets.choice(alphabet) for _ in range(num))