push latest changes
This commit is contained in:
parent
7523a19d1f
commit
cb2b5c2c2b
63 changed files with 3158 additions and 1552 deletions
143
api/__init__.py
Normal file
143
api/__init__.py
Normal file
|
@ -0,0 +1,143 @@
|
|||
import io
|
||||
from fabric import Connection
|
||||
|
||||
from utils import genpw, handle_config
|
||||
|
||||
from .jackett import Jackett
|
||||
from .jellyfin import Jellyfin
|
||||
from .portainer import Portainer
|
||||
from .qbittorrent import QBittorrent
|
||||
from .radarr import Radarr
|
||||
from .sonarr import Sonarr
|
||||
|
||||
|
||||
class Client(object):
    """Aggregates the individual service clients (Jackett, Sonarr, Radarr,
    Jellyfin, qBittorrent, Portainer) plus an SSH connection to the media
    server, all configured from a single config mapping."""

    def __init__(self, cfg=None):
        # cfg: config mapping; loaded from disk when not supplied.
        if cfg is None:
            cfg = handle_config()
        self.cfg = cfg
        self.jackett = Jackett(cfg["jackett_url"], cfg["jackett_api_key"])
        self.sonarr = Sonarr(cfg["sonarr_url"], cfg["sonarr_api_key"])
        self.radarr = Radarr(cfg["radarr_url"], cfg["radarr_api_key"])
        self.jellyfin = Jellyfin(
            cfg["jellyfin_url"], cfg["jellyfin_user"], cfg["jellyfin_password"]
        )
        # NOTE: attribute name kept as "qbittorent" (sic) so existing
        # callers keep working.
        self.qbittorent = QBittorrent(
            cfg["qbt_url"], cfg["qbt_username"], cfg["qbt_passwd"]
        )
        self.portainer = Portainer(
            cfg["portainer_url"],
            cfg["portainer_username"],
            cfg["portainer_passwd"],
        )
        self.ssh = Connection("root@server")

    def _get_ssh_keys(self):
        """Return [(key_type, key, comment), ...] parsed from the server's
        authorized_keys file, skipping comment lines."""
        res = self.ssh.get("/data/.ssh/authorized_keys", io.BytesIO())
        res.local.seek(0)
        ret = []
        for line in str(res.local.read(), "utf8").splitlines():
            if line.startswith("#"):
                continue
            key_type, key, comment = line.split(None, 2)
            ret.append((key_type, key, comment))
        return ret

    def add_user(self, name, ssh_key):
        """Create a Jellyfin user and install their SSH key on the server.

        Keys are deduplicated by key material (first occurrence wins).
        Returns the generated password for the new Jellyfin account.
        """
        cfg = handle_config()
        user_config = cfg["jellyfin_user_config"]
        user_policy = cfg["jellyfin_user_policy"]
        passwd = genpw()
        res = self.ssh.get("/data/.ssh/authorized_keys", io.BytesIO())
        res.local.seek(0)
        keys = []
        for line in str(res.local.read(), "utf8").splitlines():
            # BUGFIX: blank lines, comments, or short lines previously
            # crashed the 3-way unpack below; skip non-key entries.
            if not line.strip() or line.startswith("#"):
                continue
            parts = line.split(None, 2)
            if len(parts) == 3:
                keys.append(parts)
        key_type, key, *_ = ssh_key.split()
        keys.append([key_type, key, name])
        new_keys = []
        seen_keys = set()
        for key_type, key, key_name in keys:
            if key not in seen_keys:
                seen_keys.add(key)
                new_keys.append([key_type, key, key_name])
        new_keys_file = "\n".join(" ".join(key) for key in new_keys)
        self.ssh.put(
            io.BytesIO(bytes(new_keys_file, "utf8")),
            "/data/.ssh/authorized_keys",
            preserve_mode=False,
        )
        user = self.jellyfin.post(
            "Users/New",
            json={
                "Name": name,
                "Password": passwd})
        user = user.json()
        self.jellyfin.post(
            "Users/{Id}/Configuration".format(**user), json=user_config)
        self.jellyfin.post(
            "Users/{Id}/Policy".format(**user), json=user_policy)
        return passwd

    def queue(self, ids=None):
        """Return the combined Sonarr + Radarr download queues.

        ids: optional collection of series/movie ids to filter on;
        None/empty means "everything".
        BUGFIX: default was a shared mutable list ([]).
        """
        ret = []
        for item in self.sonarr.queue():
            if not ids or item.get("seriesId") in ids:
                item["type"] = "sonarr"
                ret.append(item)
        for item in self.radarr.queue():
            item["download"] = self.qbittorent.status(item["downloadId"])
            if not ids or item.get("movieId") in ids:
                item["type"] = "radarr"
                ret.append(item)
        return ret

    @classmethod
    def test(cls, cfg=None):
        """Instantiate every service client and run its .test() probe.

        Returns {"success": bool, "errors": {module_name: errors_or_msg}}.
        BUGFIX: was decorated @staticmethod despite taking ``cls``, so
        Client.test(cfg) bound cfg to the first parameter and lost it.
        """
        modules = [
            (
                "Jackett",
                lambda cfg: Jackett(cfg["jackett_url"], cfg["jackett_api_key"]),
            ),
            ("Sonarr", lambda cfg: Sonarr(cfg["sonarr_url"], cfg["sonarr_api_key"])),
            ("Radarr", lambda cfg: Radarr(cfg["radarr_url"], cfg["radarr_api_key"])),
            (
                "QBittorrent",
                lambda cfg: QBittorrent(
                    cfg["qbt_url"], cfg["qbt_username"], cfg["qbt_passwd"]
                ),
            ),
            (
                "Jellyfin",
                # NOTE(review): __init__ reads "jellyfin_user"/"jellyfin_password"
                # but this uses "jellyfin_username"/"jellyfin_passwd" — confirm
                # which keys the config actually defines.
                lambda cfg: Jellyfin(
                    cfg["jellyfin_url"],
                    cfg["jellyfin_username"],
                    cfg["jellyfin_passwd"],
                ),
            ),
            (
                "Portainer",
                lambda cfg: Portainer(
                    cfg["portainer_url"],
                    cfg["portainer_username"],
                    cfg["portainer_passwd"],
                ),
            ),
        ]
        errors = {}
        success = True
        for mod, make in modules:
            try:
                print("Testing", mod)
                errors[mod] = make(cfg).test()
                if errors[mod]:
                    success = False
            except Exception as e:
                # Record the failure instead of aborting the whole sweep.
                errors[mod] = str(e)
                success = False
        return {"success": success, "errors": errors}
|
45
api/jackett.py
Normal file
45
api/jackett.py
Normal file
|
@ -0,0 +1,45 @@
|
|||
import time
|
||||
from urllib.parse import urljoin
|
||||
|
||||
import requests as RQ
|
||||
|
||||
|
||||
class Jackett(object):
    """Minimal client for the Jackett torrent-indexer aggregator API."""

    def __init__(self, url, api_key):
        self.url = url
        self.api_key = api_key
        self.session = RQ.Session()
        # Prime the session (cookies) via the dashboard page.
        # BUGFIX: was a hard-coded LAN address
        # ("http://192.168.2.25:9117/jackett/UI/Dashboard"); derive the
        # path from the configured base URL instead, matching how every
        # other endpoint in this class is built.
        self.session.post(urljoin(self.url, "UI/Dashboard"))

    def search(self, query, indexers=None):
        """Search all (or the given) indexers; returns the raw result dict.

        Missing/None numeric stats in each result are normalised to 0.
        """
        params = {
            "apikey": self.api_key,
            "Query": query,
            "_": str(int(time.time())),  # cache-buster
        }
        if indexers:
            params["Tracker[]"] = indexers
        res = self.session.get(
            urljoin(self.url, "api/v2.0/indexers/all/results"), params=params
        )
        res.raise_for_status()
        res = res.json()
        for val in res["Results"]:
            for prop in ["Gain", "Seeders", "Peers", "Grabs", "Files"]:
                val[prop] = val.get(prop) or 0
        return res

    def indexers(self):
        """Return (id, name) pairs for every configured indexer."""
        return [
            (t["id"], t["name"])
            for t in self.session.get(urljoin(self.url, "api/v2.0/indexers")).json()
            if t.get("configured")
        ]

    def test(self):
        """Run Jackett's per-indexer self test; return {name: error}."""
        errors = {}
        for idx, name in self.indexers():
            print("Testing indexer", name)
            result = self.session.post(
                urljoin(self.url, "api/v2.0/indexers/{}/test".format(idx))
            )
            # Empty body means success; otherwise the body is JSON with
            # an "error" field.
            if result.text:
                errors[name] = result.json()["error"]
        return errors
|
333
api/jellyfin.py
Normal file
333
api/jellyfin.py
Normal file
|
@ -0,0 +1,333 @@
|
|||
import time
|
||||
import base64
|
||||
from urllib.parse import urljoin
|
||||
from datetime import timedelta
|
||||
|
||||
import requests as RQ
|
||||
from dateutil.parser import parse as parse_datetime
|
||||
|
||||
from utils import timed_cache
|
||||
|
||||
|
||||
class Jellyfin(object):
    """Client for the Jellyfin media-server REST API.

    Authenticates either interactively (``user`` given, ``password`` is the
    account password) or with a pre-provisioned API key (``user is None``,
    ``password`` is the key).
    """

    def __init__(self, url, user, password):
        self.url = url
        self.session = RQ.Session()
        # Stable per-client device id derived from the requests UA string.
        self.device_id = str(
            base64.b64encode(
                "MediaDash ({})".format(
                    self.session.headers["User-Agent"]).encode("utf-8")),
            "utf8",
        )
        self.auth_headers = {
            "X-Emby-Authorization": 'MediaBrowser Client="MediaDash", Device="Python", DeviceId="{}", Version="{}"'.format(
                self.device_id, RQ.__version__)}
        self.user = None
        if user is not None:
            # Interactive login; the server issues a fresh access token.
            res = self.login_user(user, password)
            self.api_key = res["AccessToken"]
        else:
            # No user name: treat "password" as an existing API key.
            self.api_key = password
            self.auth_headers = {
                "X-Emby-Authorization": 'MediaBrowser Client="MediaDash", Device="Python", DeviceId="{}", Version="{}", Token="{}"'.format(
                    self.device_id, RQ.__version__, self.api_key)}
        # ws_url=self.url.replace("http","ws").rstrip("/")+"/?"+urlencode({"api_key":self.api_key,"deviceId":self.device_id})
        # self.ws = websocket.WebSocketApp(ws_url,on_open=print,on_error=print,on_message=print,on_close=print)
        # self.ws_thread = Thread(target=self.ws.run_forever,daemon=True)
        self.session.headers.update(
            {**self.auth_headers, "X-Emby-Token": self.api_key})
        self.user = self.get_self()
        self.user_id = self.user["Id"]
        # Command sets accepted by Sessions/{id}/Playing and
        # Sessions/{id}/Command respectively.
        self.playstate_commands = sorted([
            "Stop", "Pause", "Unpause", "NextTrack", "PreviousTrack",
            "Seek", "Rewind", "FastForward", "PlayPause",
        ])
        self.session_commands = sorted([
            "MoveUp", "MoveDown", "MoveLeft", "MoveRight", "PageUp",
            "PageDown", "PreviousLetter", "NextLetter", "ToggleOsd",
            "ToggleContextMenu", "Select", "Back", "TakeScreenshot",
            "SendKey", "SendString", "GoHome", "GoToSettings", "VolumeUp",
            "VolumeDown", "Mute", "Unmute", "ToggleMute", "SetVolume",
            "SetAudioStreamIndex", "SetSubtitleStreamIndex",
            "ToggleFullscreen", "DisplayContent", "GoToSearch",
            "DisplayMessage", "SetRepeatMode", "ChannelUp", "ChannelDown",
            "Guide", "ToggleStats", "PlayMediaSource", "PlayTrailers",
            "SetShuffleQueue", "PlayState", "PlayNext", "ToggleOsdMenu",
            "Play",
        ])

    def login_user(self, user, passwd):
        """Authenticate by name; updates the session token header.

        Returns the server's authentication result dict.
        """
        res = self.post(
            "Users/AuthenticateByName",
            json={"Username": user, "Pw": passwd},
            headers=self.auth_headers,
        )
        res.raise_for_status()
        res = res.json()
        self.session.headers.update(
            {**self.auth_headers, "X-Emby-Token": res["AccessToken"]}
        )
        return res

    def logout(self):
        """Invalidate the current session token."""
        self.session.post(urljoin(self.url, "Sessions/Logout"))

    def status(self):
        """Server System/Info as a dict."""
        res = self.session.get(urljoin(self.url, "System/Info"))
        res.raise_for_status()
        return res.json()

    def chapter_image_url(self, item_id, chapter_num, tag):
        """Return the URL of a chapter image.

        BUGFIX: urljoin() accepts only (base, url); the old call passed
        each path segment as an extra positional argument and raised
        TypeError, and the ``tag`` parameter was silently dropped.
        """
        return urljoin(
            self.url,
            "Items/{}/Images/Chapter/{}?tag={}".format(
                item_id, chapter_num, tag),
        )

    def rq(self, method, url, *args, **kwargs):
        """Issue a request with an arbitrary HTTP method against an API path."""
        res = self.session.request(
            method, urljoin(self.url, url), *args, **kwargs)
        res.raise_for_status()
        return res

    def get(self, url, *args, **kwargs):
        """GET an API path; raises on HTTP error, returns the Response."""
        res = self.session.get(urljoin(self.url, url), *args, **kwargs)
        res.raise_for_status()
        return res

    def post(self, url, *args, **kwargs):
        """POST to an API path; raises on HTTP error, returns the Response."""
        res = self.session.post(urljoin(self.url, url), *args, **kwargs)
        res.raise_for_status()
        return res

    def sessions(self):
        """Active playback sessions."""
        return self.get("Sessions").json()

    @timed_cache()
    def season_episodes(self, item_id, season_id):
        """Episodes of one season, each augmented with full media info."""
        res = self.get(
            "Shows/{}/Episodes".format(item_id),
            params={
                "UserId": self.user_id,
                "seasonId": season_id,
                "fields": "Overview,MediaStreams,MediaSources,ExternalUrls",
            },
        )
        res = res.json()["Items"]
        for episode in res:
            episode["Info"] = self.media_info(episode["Id"])
        return res

    @timed_cache()
    def seasons(self, item_id):
        """Seasons of a series, each with its episode list attached."""
        res = self.get(
            "Shows/{}/Seasons".format(item_id),
            params={
                "UserId": self.user_id,
                "fields": "Overview,MediaStreams,MediaSources,ExternalUrls",
            },
        )
        res = res.json()["Items"]
        for season in res:
            season["Episodes"] = self.season_episodes(item_id, season["Id"])
        return res

    @timed_cache()
    def media_info(self, item_id):
        """Full item record; series additionally get their seasons attached."""
        res = self.get(
            "Users/{}/Items/{}".format(self.user_id, item_id),
        ).json()
        if res["Type"] == "Series":
            res["Seasons"] = self.seasons(item_id)
        return res

    def system_info(self):
        """Alias of status(): server System/Info."""
        return self.get("System/Info").json()

    def __get_child_items(self, item_id):
        # Direct children of an item (one level down).
        res = self.get(
            "Users/{}/Items".format(self.user_id),
            params={"ParentId": item_id},
        )
        return res.json()

    def get_recursive(self, item_id):
        """Yield all descendants of item_id, depth-first."""
        for item in self.__get_child_items(item_id).get("Items", []):
            yield item
            yield from self.get_recursive(item["Id"])

    def get_counts(self):
        """Library item counts by type."""
        return self.get("Items/Counts").json()

    @timed_cache(seconds=10)
    def id_map(self):
        """Map (provider_name_lower, provider_id) -> Jellyfin item id.

        BUGFIX: the old loop discarded the provider name and registered
        every provider id under all three provider names (a cross
        product); now each id is keyed by its own provider only.
        """
        res = self.get(
            "Users/{}/Items".format(self.user_id),
            params={
                "recursive": True,
                "includeItemTypes": "Movie,Series",
                "collapseBoxSetItems": False,
                "fields": "ProviderIds",
            },
        ).json()["Items"]
        id_map = {}
        for item in res:
            for prov, prov_id in item["ProviderIds"].items():
                if prov.lower() in ("imdb", "tmdb", "tvdb"):
                    id_map[(prov.lower(), prov_id)] = item["Id"]
        return id_map

    @timed_cache()
    def get_library(self):
        """Return {item_id: item} with "*Id" references resolved in place.

        When an item carries e.g. "SeriesId" pointing at another item in
        the same result set, a "Series" key with the referenced object is
        added.
        """
        res = self.get(
            "Users/{}/Items".format(self.user_id),
            params={
                "recursive": True,
                "includeItemTypes": "Movie,Series",
                "collapseBoxSetItems": False,
            },
        ).json()
        library = {}
        for item in res["Items"]:
            library[item["Id"]] = item
        for item in res["Items"]:
            for key, value in item.copy().items():
                if key != "Id" and key.endswith("Id"):
                    key = key[:-2]
                    if value in library and key not in item:
                        item[key] = library[value]
        return library

    def get_usage(self):
        """Playback history rows from the Playback Reporting plugin."""
        report = self.post(
            "user_usage_stats/submit_custom_query",
            params={"stamp": int(time.time())},
            json={
                "CustomQueryString": "SELECT * FROM PlaybackActivity",
                "ReplaceUserId": True,
            },
        ).json()
        ret = []
        for row in report["results"]:
            # "colums" (sic) is the key the plugin actually returns.
            rec = dict(zip(report["colums"], row))
            rec["PlayDuration"] = timedelta(seconds=int(rec["PlayDuration"]))
            ts = rec.pop("DateCreated")
            if ts:
                rec["Timestamp"] = parse_datetime(ts)
            ret.append(rec)
        return ret

    def __db_fetch(self, endpoint):
        """Fetch a Reports/* table and zip it into a list of row dicts."""
        ret = []
        res = self.session.get(
            urljoin(self.url, endpoint),
            params={
                "StartIndex": 0,
                "IncludeItemTypes": "*",
                "ReportColumns": ""},
        )
        res.raise_for_status()
        res = res.json()
        headers = [h["Name"].lower() for h in res["Headers"]]
        for row in res["Rows"]:
            fields = [c["Name"] for c in row["Columns"]]
            ret.append(dict(zip(headers, fields)))
            ret[-1]["row_type"] = row["RowType"]
        return ret

    def get_self(self):
        """The user record belonging to the current token."""
        return self.get("Users/Me").json()

    def get_users(self, user_id=None):
        """One user by id, or the full user list when user_id is falsy."""
        if user_id:
            res = self.get("Users/{}".format(user_id))
        else:
            res = self.get("Users")
        return res.json()

    def activity(self):
        """Activity report rows."""
        return self.__db_fetch("Reports/Activities")

    def report(self):
        """Items report rows."""
        return self.__db_fetch("Reports/Items")

    def stop_session(self, session_id):
        """Hard-stop playback in a session.

        Kills the active transcode, marks the item stopped for the user,
        then sends the Stop playstate command.
        """
        for session in self.get("Sessions").json():
            if session["Id"] == session_id and "NowPlayingItem" in session:
                s_id = session["Id"]
                u_id = session["UserId"]
                i_id = session["NowPlayingItem"]["Id"]
                d_id = session["DeviceId"]
                self.rq(
                    "delete",
                    "Videos/ActiveEncodings",
                    params={"deviceId": d_id, "playSessionId": s_id},
                )
                self.rq("delete", f"Users/{u_id}/PlayingItems/{i_id}")
                self.rq("post", f"Sessions/{s_id}/Playing/Stop")

    def test(self):
        """Connectivity probe used by Client.test()."""
        self.status()
        return {}
|
75
api/portainer.py
Normal file
75
api/portainer.py
Normal file
|
@ -0,0 +1,75 @@
|
|||
import json
|
||||
from urllib.parse import urljoin
|
||||
|
||||
import requests as RQ
|
||||
|
||||
|
||||
class Portainer(object):
    """Client for the Portainer REST API (Docker endpoint 1)."""

    def __init__(self, url, username, passwd):
        self.url = url
        self.session = RQ.Session()
        # Authenticate once and keep the JWT on the session headers.
        jwt = self.session.post(
            urljoin(self.url, "api/auth"),
            json={"username": username, "password": passwd},
        ).json()
        self.session.headers.update(
            {"Authorization": "Bearer {0[jwt]}".format(jwt)})

    def containers(self, container_id=None):
        """List the tvstack compose project's containers, or detail one.

        For a single container the result dict is augmented with "stats"
        and "top"; the list view is returned as-is (per-container stats
        were too slow there — see the removed commented-out loop).
        """
        if container_id is None:
            res = self.session.get(
                urljoin(self.url, "api/endpoints/1/docker/containers/json"),
                params={
                    "all": 1,
                    "filters": json.dumps(
                        {"label": ["com.docker.compose.project=tvstack"]}
                    ),
                },
            )
        else:
            res = self.session.get(
                urljoin(
                    self.url,
                    "api/endpoints/1/docker/containers/{}/json".format(container_id),
                ))
        res.raise_for_status()
        res = res.json()
        if container_id is not None:
            res["stats"] = self.stats(container_id)
            res["top"] = self.top(container_id)
        return res

    def top(self, container_id):
        """Return the raw docker "top" (process list) response.

        NOTE: returned unshaped; the "Titles"/"Processes" reformatting
        was never finished in the original (dead locals removed).
        """
        res = self.session.get(
            urljoin(
                self.url,
                "api/endpoints/1/docker/containers/{}/top".format(container_id),
            ))
        res.raise_for_status()
        return res.json()

    def stats(self, container_id):
        """One-shot (non-streaming) resource stats for a container."""
        res = self.session.get(
            urljoin(
                self.url,
                "api/endpoints/1/docker/containers/{}/stats".format(container_id),
            ),
            params={"stream": False},
        )
        res.raise_for_status()
        return res.json()

    def test(self):
        """Connectivity probe used by Client.test()."""
        self.containers()
        return {}
|
155
api/qbittorrent.py
Normal file
155
api/qbittorrent.py
Normal file
|
@ -0,0 +1,155 @@
|
|||
import time
|
||||
from urllib.parse import urljoin, urlparse
|
||||
|
||||
import requests as RQ
|
||||
|
||||
|
||||
class QBittorrent(object):
    """Client for the qBittorrent WebUI API (v2)."""

    # torrent "state" string -> (human label, bootstrap colour class)
    status_map = {
        "downloading": ("Downloading", "primary"),
        "uploading": ("Seeding", "success"),
        "forcedDL": ("Downloading [Forced]", "primary"),
        "forcedUP": ("Seeding [Forced]", "success"),
        "pausedDL": ("Downloading [Paused]", "secondary"),
        "pausedUP": ("Seeding [Paused]", "secondary"),
        "stalledDL": ("Downloading [Stalled]", "warning"),
        "stalledUP": ("Seeding [Stalled]", "warning"),
        "metaDL": ("Downloading metadata", "primary"),
        "error": ("Error", "danger"),
        "missingFiles": ("Missing Files", "danger"),
        "queuedUP": ("Seeding [Queued]", "info"),
        "queuedDL": ("Downloading [Queued]", "info"),
    }

    # tracker "status" int -> (human label, bootstrap colour class)
    tracker_status = {
        0: ("Disabled", "secondary"),
        1: ("Not contacted", "info"),
        2: ("Working", "success"),
        3: ("Updating", "warning"),
        4: ("Not working", "danger"),
    }

    def __init__(self, url, username, passwd):
        self.url = url
        self.username = username
        self.passwd = passwd
        # rid: incremental-sync counter for /sync/maindata.
        self.rid = int(time.time())
        self.session = RQ.Session()
        url = urljoin(self.url, "/api/v2/auth/login")
        self.session.post(
            url, data={"username": self.username, "password": self.passwd}
        ).raise_for_status()

    def get(self, url, **kwargs):
        """GET an API path; kwargs become query params (plus the sync rid).

        Returns parsed JSON when possible, raw text otherwise.
        """
        kwargs["rid"] = self.rid
        url = urljoin(self.url, url)
        res = self.session.get(url, params=kwargs)
        res.raise_for_status()
        try:
            return res.json()
        except ValueError:
            return res.text

    def add(self, **kwargs):
        """Add a torrent; kwargs pass straight through to torrents/add.

        Returns (response_text, status_code).
        """
        self.rid += 1
        url = urljoin(self.url, "/api/v2/torrents/add")
        ret = self.session.post(url, data=kwargs)
        return ret.text, ret.status_code

    def add_trackers(self, infohash, trackers=None):
        """Attach trackers to a torrent, augmented with public lists.

        Failures fetching a public list are logged and skipped
        (best effort).
        """
        if trackers is None:
            trackers = []
        for tracker_list in [
            "https://newtrackon.com/api/live",
            "https://ngosang.github.io/trackerslist/trackers_best.txt",
        ]:
            try:
                trackers_res = RQ.get(tracker_list)
                trackers_res.raise_for_status()
            except Exception as e:
                print("Error getting tracker list:", e)
                continue
            trackers += trackers_res.text.split()
        url = urljoin(self.url, "/api/v2/torrents/addTrackers")
        data = {"hash": infohash, "urls": "\n\n".join(trackers)}
        ret = self.session.post(url, data=data)
        ret.raise_for_status()
        return ret.text

    def poll(self, infohash=None):
        """Full state of one torrent (infohash given) or global maindata.

        Single-torrent results get human-readable status tuples and peer
        counts; maindata results get per-torrent speed and ETA fields.
        """
        if infohash:
            ret = {}
            ret["info"] = self.get("/api/v2/torrents/info", hashes=infohash)
            for endpoint in ["properties", "trackers", "webseeds", "files"]:
                url = "/api/v2/torrents/{}".format(endpoint)
                res = self.get(url, hash=infohash)
                if endpoint == "trackers":
                    for v in res:
                        if v["tier"] == "":
                            v["tier"] = -1
                        v["status"] = self.tracker_status.get(
                            v["status"], ("Unknown", "light")
                        )
                        v["total_peers"] = (
                            v["num_seeds"] + v["num_leeches"] + v["num_peers"]
                        )
                        # Negative counts mean "unknown" -> render as "?".
                        for k in [
                            "num_seeds",
                            "num_leeches",
                            "total_peers",
                            "num_downloaded",
                            "num_peers",
                        ]:
                            if v[k] < 0:
                                v[k] = (-1, "?")
                            else:
                                v[k] = (v[k], v[k])
                ret[endpoint] = res
            ret["info"] = ret["info"][0]
            ret["info"]["state"] = self.status_map.get(
                ret["info"]["state"], (ret["info"]["state"], "light")
            )
            for tracker in ret["trackers"]:
                tracker["name"] = urlparse(
                    tracker["url"]).netloc or tracker["url"]
                tracker["has_url"] = bool(urlparse(tracker["url"]).netloc)
            return ret
        res = self.get("/api/v2/sync/maindata")
        if "torrents" in res:
            for k, v in res["torrents"].items():
                v["hash"] = k
                v["speed"] = v["upspeed"] + v["dlspeed"]
                # ETA from the overall average download rate.
                # BUGFIX: max(0, elapsed) yielded 0 for just-added
                # torrents or clock skew -> ZeroDivisionError.
                elapsed = time.time() - v["added_on"]
                dl_rate = v["downloaded"] / elapsed if elapsed > 0 else 0
                if dl_rate > 0:
                    v["eta"] = max(0, (v["size"] - v["downloaded"]) / dl_rate)
                else:
                    v["eta"] = 0
                # ETA from the rate while the torrent was actually active.
                if v["time_active"] == 0:
                    dl_rate = 0
                else:
                    dl_rate = v["downloaded"] / v["time_active"]
                if dl_rate > 0:
                    v["eta_act"] = max(
                        0, (v["size"] - v["downloaded"]) / dl_rate)
                else:
                    v["eta_act"] = 0
                res["torrents"][k] = v
        res["version"] = self.get("/api/v2/app/version")
        self.rid = res["rid"]
        return res

    def status(self, infohash=None):
        """Bump the sync counter and poll."""
        self.rid += 1
        return self.poll(infohash)

    def peer_log(self, limit=0):
        """Last ``limit`` peer-log entries (0 = all)."""
        return self.get("/api/v2/log/peers")[-limit:]

    def log(self, limit=0):
        """Last ``limit`` main-log entries (0 = all)."""
        return self.get("/api/v2/log/main")[-limit:]

    def test(self):
        """Connectivity probe used by Client.test()."""
        self.poll()
        return {}
|
98
api/radarr.py
Normal file
98
api/radarr.py
Normal file
|
@ -0,0 +1,98 @@
|
|||
import time
|
||||
from datetime import datetime, timedelta
|
||||
from urllib.parse import urljoin
|
||||
|
||||
import requests as RQ
|
||||
|
||||
from utils import timed_cache
|
||||
|
||||
|
||||
class Radarr(object):
    """Client for the Radarr v3 movie-management API."""

    def __init__(self, url, api_key):
        self.url = url
        self.api_key = api_key
        # Cache server-side defaults used when adding movies.
        self.root_folder = self.get("api/v3/rootFolder")[0]["path"]
        self.quality_profile = self.get("api/v3/qualityprofile")[0]

    def get(self, url, **kwargs):
        """GET an API path; extra kwargs become query parameters.

        Returns parsed JSON, or raw text when the body is not JSON.
        """
        kwargs["apikey"] = self.api_key
        kwargs["_"] = str(int(time.time()))  # cache-buster
        res = RQ.get(urljoin(self.url, url), params=kwargs)
        res.raise_for_status()
        try:
            return res.json()
        except Exception:
            return res.text

    def search(self, query):
        """Movie lookup by title/term."""
        return self.get("api/v3/movie/lookup", term=query)

    def status(self):
        """Server system status."""
        return self.get("api/v3/system/status")

    @timed_cache()
    def history(self, pageSize=500):
        """Most recent history entries, newest first.

        BUGFIX: pageSize was ignored (500 was hard-coded in the request).
        """
        return self.get(
            "api/v3/history",
            page=1,
            pageSize=pageSize,
            sortDirection="descending",
            sortKey="date",
        )

    @timed_cache()
    def calendar(self, days=90):
        """Calendar entries +/- ``days`` around today."""
        today = datetime.today()
        start = today - timedelta(days=days)
        end = today + timedelta(days=days)
        return self.get(
            "api/v3/calendar",
            unmonitored=False,
            start=start.isoformat(),
            end=end.isoformat(),
        )

    @timed_cache()
    def movies(self, movie_id=None):
        """One movie by id, or all movies."""
        if movie_id is None:
            return self.get("api/v3/movie")
        return self.get("api/v3/movie/{}".format(movie_id))

    @timed_cache(seconds=60)
    def queue(self, **kwargs):
        """Full download queue, transparently paginated (100/page)."""
        data = []
        page = 1
        while True:
            res = self.get("api/v3/queue", page=page, pageSize=100, **kwargs)
            records = res.get("records", [])
            data += records
            page += 1
            # Guard against an empty page so a bad totalRecords value
            # cannot spin this loop forever.
            if not records or len(data) >= res.get("totalRecords", 0):
                break
        return data

    def add(self, data):
        """Add a movie using the cached server defaults; returns the
        server's response JSON."""
        data["qualityProfileId"] = self.quality_profile["id"]
        data["minimumAvailability"] = 2  # InCinema
        data["rootFolderPath"] = self.root_folder
        data["addOptions"] = {"searchForMovie": True}
        params = dict(apikey=self.api_key)
        res = RQ.post(
            urljoin(self.url, "api/v3/movie"),
            json=data,
            params=params)
        return res.json()

    def log(self, limit=0):
        """Most recent log entries (0 = up to 1024)."""
        return self.get(
            "api/v3/log",
            page=1,
            pageSize=(limit or 1024),
            sortDirection="descending",
            sortKey="time",
        )

    def test(self):
        """Connectivity probe used by Client.test()."""
        self.status()
        return {}
|
116
api/sonarr.py
Normal file
116
api/sonarr.py
Normal file
|
@ -0,0 +1,116 @@
|
|||
import time
|
||||
from urllib.parse import urljoin
|
||||
from datetime import datetime, timedelta
|
||||
import requests as RQ
|
||||
|
||||
from utils import timed_cache
|
||||
|
||||
|
||||
class Sonarr(object):
    """Client for the Sonarr v3 series-management API."""

    def __init__(self, url, api_key):
        self.url = url
        self.api_key = api_key
        # Cache server-side defaults used when adding series.
        self.root_folder = self.get("api/v3/rootFolder")[0]["path"]
        self.quality_profile = self.get("api/v3/qualityprofile")[0]
        self.language_profile = self.get("api/v3/languageprofile")[0]

    def get(self, url, **kwargs):
        """GET an API path; extra kwargs become query parameters.

        Returns parsed JSON, or raw text when the body is not JSON.
        """
        kwargs["apikey"] = self.api_key
        kwargs["_"] = str(int(time.time()))  # cache-buster
        res = RQ.get(urljoin(self.url, url), params=kwargs)
        res.raise_for_status()
        try:
            return res.json()
        except Exception:
            return res.text

    def search(self, query):
        """Series lookup by title/term."""
        return self.get("api/v3/series/lookup", term=query)

    def status(self):
        """Server system status."""
        return self.get("api/v3/system/status")

    @timed_cache()
    def history(self, pageSize=500):
        """Most recent history entries, newest first.

        BUGFIX: pageSize was ignored (500 was hard-coded in the request).
        """
        return self.get(
            "api/v3/history",
            page=1,
            pageSize=pageSize,
            sortDirection="descending",
            sortKey="date",
        )

    @timed_cache()
    def calendar(self, days=30):
        """Calendar entries +/- ``days`` around today."""
        today = datetime.today()
        start = today - timedelta(days=days)
        end = today + timedelta(days=days)
        return self.get(
            "api/v3/calendar",
            unmonitored=False,
            start=start.isoformat(),
            end=end.isoformat(),
        )

    @timed_cache()
    def series(self, series_id=None, keys=None):
        """All series, or one series with episodes/files/queue attached.

        NOTE(review): ``keys`` is accepted but unused — kept for
        interface compatibility; confirm whether callers pass it.
        """
        if series_id is None:
            return self.get("api/v3/series")
        ret = {}
        ret["series"] = self.get("api/v3/series/{}".format(series_id))
        ret["episodes"] = self.get("api/v3/episode", seriesId=series_id)
        ret["episodeFile"] = self.get("api/v3/episodeFile", seriesId=series_id)
        ret["queue"] = self.get("api/v3/queue/details", seriesId=series_id)
        return ret

    @timed_cache(seconds=60)
    def queue(self, **kwargs):
        """Full download queue, transparently paginated (100/page).

        BUGFIX: each page previously *replaced* the accumulator
        (``data = res.get(...)``) instead of extending it, losing all but
        the last page and looping forever past 100 records.
        """
        data = []
        page = 1
        while True:
            res = self.get("api/v3/queue", page=page, pageSize=100, **kwargs)
            records = res.get("records", [])
            data += records
            page += 1
            # Empty-page guard so a bad totalRecords cannot spin forever.
            if not records or len(data) >= res.get("totalRecords", 0):
                break
        return data

    @timed_cache()
    def details(self, episode_id):
        """Queue details for specific episode ids."""
        return self.get("api/v3/queue/details", episodeIds=episode_id)

    @timed_cache()
    def episodes(self, series_id):
        """All episodes of a series."""
        return self.get("api/v3/episode", seriesId=series_id)

    def add(self, data):
        """Add a series using the cached server defaults; returns the
        server's response JSON."""
        data["qualityProfileId"] = self.quality_profile["id"]
        data["languageProfileId"] = self.language_profile["id"]
        data["rootFolderPath"] = self.root_folder
        data["addOptions"] = {
            "ignoreEpisodesWithoutFiles": False,
            "ignoreEpisodesWithFiles": True,
            "searchForMissingEpisodes": True,
        }
        data["seasonFolder"] = True
        params = dict(apikey=self.api_key)
        res = RQ.post(
            urljoin(self.url, "api/v3/series"),
            json=data,
            params=params)
        return res.json()

    def log(self, limit=0):
        """Most recent log entries (0 = up to 1024)."""
        return self.get(
            "api/v3/log",
            page=1,
            pageSize=(limit or 1024),
            sortDirection="descending",
            sortKey="time",
        )

    def test(self):
        """Connectivity probe used by Client.test()."""
        self.status()
        return {}
|
34
api/user.py
Normal file
34
api/user.py
Normal file
|
@ -0,0 +1,34 @@
|
|||
from flask_login import UserMixin
|
||||
|
||||
from api import Jellyfin
|
||||
from utils import handle_config
|
||||
|
||||
|
||||
class JellyfinUser(UserMixin):
    """Flask-Login user backed by a Jellyfin account.

    Authenticates once at construction time, snapshots the user record
    and API key, then logs the temporary Jellyfin session out again.
    """

    def __init__(self, username, password):
        client = Jellyfin(handle_config()["jellyfin_url"], username, password)
        self.user = client.user
        self.api_key = client.api_key
        self.id = self.user["Id"]
        client.logout()

    def __getitem__(self, key):
        # Dict-style access to the raw Jellyfin user record.
        return self.user[key]

    @property
    def is_anonymous(self):
        """Never anonymous: construction requires valid credentials."""
        return False

    @property
    def is_admin(self):
        """True when the Jellyfin policy marks this user administrator."""
        return self.user["Policy"]["IsAdministrator"]

    @property
    def is_authenticated(self):
        """Always authenticated once constructed."""
        return True

    @property
    def is_active(self):
        """Active unless the account is disabled in Jellyfin."""
        return not self.user["Policy"]["IsDisabled"]
|
Loading…
Add table
Add a link
Reference in a new issue