# MediaDash/api/qbittorrent.py

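"""Thin client for the qBittorrent Web API (api/v2) used by MediaDash.

Handles login, torrent listing and per-torrent detail, tracker
management, and log retrieval; the class-level maps translate raw API
states into (display label, style class) pairs for the dashboard UI.
"""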
import time
from urllib.parse import urljoin, urlparse

import requests as RQ


class QBittorrent(object):
    # Map raw qBittorrent torrent states to (display label, style class).
    status_map = {
        "downloading": ("Downloading", "primary"),
        "uploading": ("Seeding", "success"),
        "forcedDL": ("Downloading [Forced]", "primary"),
        "forcedUP": ("Seeding [Forced]", "success"),
        "pausedDL": ("Downloading [Paused]", "secondary"),
        "pausedUP": ("Seeding [Paused]", "secondary"),
        "stalledDL": ("Downloading [Stalled]", "warning"),
        "stalledUP": ("Seeding [Stalled]", "warning"),
        "metaDL": ("Downloading metadata", "primary"),
        "error": ("Error", "danger"),
        "missingFiles": ("Missing Files", "danger"),
        "queuedUP": ("Seeding [Queued]", "info"),
        "queuedDL": ("Downloading [Queued]", "info"),
    }
    # Map numeric tracker states to (display label, style class).
    tracker_status = {
        0: ("Disabled", "secondary"),
        1: ("Not contacted", "info"),
        2: ("Working", "success"),
        3: ("Updating", "warning"),
        4: ("Not working", "danger"),
    }

    def __init__(self, url, username, passwd):
        self.url = url
        self.username = username
        self.passwd = passwd
        self.rid = int(time.time())
        self.session = RQ.Session()
        # Log in once; the session cookie authenticates later requests.
        # (qBittorrent answers a failed login with HTTP 200 and the body
        # "Fails.", so raise_for_status() only catches transport errors.)
        url = urljoin(self.url, "/api/v2/auth/login")
        self.session.post(
            url, data={"username": self.username, "password": self.passwd}
        ).raise_for_status()

    def get(self, url, **kwargs):
        kwargs["rid"] = self.rid
        url = urljoin(self.url, url)
        res = self.session.get(url, params=kwargs)
        res.raise_for_status()
        try:
            return res.json()
        except ValueError:
            # Endpoints such as app/version return plain text, not JSON.
            return res.text

    def add(self, **kwargs):
        self.rid += 1
        url = urljoin(self.url, "/api/v2/torrents/add")
        ret = self.session.post(url, data=kwargs)
        return ret.text, ret.status_code

    def add_trackers(self, infohash, trackers=None):
        # Copy the caller's list so the fetched trackers don't mutate it.
        trackers = list(trackers) if trackers is not None else []
        # Merge in two public tracker lists; skip either on fetch failure.
        for tracker_list in [
            "https://newtrackon.com/api/live",
            "https://ngosang.github.io/trackerslist/trackers_best.txt",
        ]:
            try:
                trackers_res = RQ.get(tracker_list)
                trackers_res.raise_for_status()
            except Exception as e:
                print("Error getting tracker list:", e)
                continue
            trackers += trackers_res.text.split()
        url = urljoin(self.url, "/api/v2/torrents/addTrackers")
        data = {"hash": infohash, "urls": "\n\n".join(trackers)}
        ret = self.session.post(url, data=data)
        ret.raise_for_status()
        return ret.text

    def poll(self, infohash=None):
        # With an infohash: a detailed view of one torrent.
        # Without: a full sync/maindata snapshot of all torrents.
        if infohash:
            ret = {}
            res = self.get("/api/v2/torrents/info", hashes=infohash)
            ret["info"] = res
            for endpoint in ["properties", "trackers", "webseeds", "files"]:
                url = "/api/v2/torrents/{}".format(endpoint)
                res = self.get(url, hash=infohash)
                if endpoint == "trackers":
                    for v in res:
                        if v["tier"] == "":
                            v["tier"] = -1
                        v["status"] = self.tracker_status.get(
                            v["status"], ("Unknown", "light")
                        )
                        v["total_peers"] = (
                            v["num_seeds"] + v["num_leeches"] + v["num_peers"]
                        )
                        # qBittorrent reports -1 for unknown counts; turn
                        # each count into a (value, display) pair.
                        for k in [
                            "num_seeds",
                            "num_leeches",
                            "total_peers",
                            "num_downloaded",
                            "num_peers",
                        ]:
                            if v[k] < 0:
                                v[k] = (-1, "?")
                            else:
                                v[k] = (v[k], v[k])
                ret[endpoint] = res
            ret["info"] = ret["info"][0]
            ret["info"]["state"] = self.status_map.get(
                ret["info"]["state"], (ret["info"]["state"], "light")
            )
            for tracker in ret["trackers"]:
                parsed = urlparse(tracker["url"])
                tracker["name"] = parsed.netloc or tracker["url"]
                tracker["has_url"] = bool(parsed.netloc)
            return ret
        res = self.get("/api/v2/sync/maindata")
        if "torrents" in res:
            for k, v in res["torrents"].items():
                v["hash"] = k
                v["speed"] = v["upspeed"] + v["dlspeed"]
                # ETA from the average rate since the torrent was added;
                # clamp elapsed time to >= 1s so a just-added torrent
                # cannot divide by zero.
                dl_rate = v["downloaded"] / max(1, time.time() - v["added_on"])
                if dl_rate > 0:
                    v["eta"] = max(0, (v["size"] - v["downloaded"]) / dl_rate)
                else:
                    v["eta"] = 0
                # ETA from the average rate while the torrent was active.
                if v["time_active"] == 0:
                    dl_rate = 0
                else:
                    dl_rate = v["downloaded"] / v["time_active"]
                if dl_rate > 0:
                    v["eta_act"] = max(
                        0, (v["size"] - v["downloaded"]) / dl_rate)
                else:
                    v["eta_act"] = 0
        res["version"] = self.get("/api/v2/app/version")
        self.rid = res["rid"]
        return res

    def status(self, infohash=None):
        self.rid += 1
        return self.poll(infohash)

    def peer_log(self, limit=0):
        # limit=0 returns the whole log, since list[-0:] is the full list.
        return self.get("/api/v2/log/peers")[-limit:]

    def log(self, limit=0):
        return self.get("/api/v2/log/main")[-limit:]

    def test(self):
        # Connectivity check: poll() raises on HTTP or auth errors.
        self.poll()
        return {}
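

# A minimal usage sketch: the URL and credentials below are placeholder
# values for a local qBittorrent WebUI, not anything this module defines.
if __name__ == "__main__":
    qb = QBittorrent("http://localhost:8080", "admin", "adminadmin")
    data = qb.status()  # sync/maindata snapshot; states are raw strings here
    for h, torrent in data.get("torrents", {}).items():
        label, style = QBittorrent.status_map.get(
            torrent["state"], (torrent["state"], "light")
        )
        print(h[:8], label, "{} B/s".format(torrent["speed"]))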