# MediaDash/stats_collect.py

import io
import json
import os
import time
from base64 import b64encode
from collections import Counter
from concurrent.futures import ThreadPoolExecutor, as_completed
from datetime import datetime
from urllib.parse import quote

import pylab as PL
from matplotlib.ticker import EngFormatter

from api import Client
from utils import handle_config

# Global plot style and the moving-average window for the smoothed charts.
mpl_style = "dark_background"
smoothness = 5

def make_svg(data, dtype):
    # Percent-encode the base64 payload so characters like "+" survive inside
    # the data URI, then wrap it in an <embed> tag for the dashboard page.
    data_uri = "data:{};base64,{}".format(dtype, quote(str(b64encode(data), "ascii")))
    return '<embed type="image/svg+xml" src="{}"/>'.format(data_uri)

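# Usage sketch (hypothetical payload): wrap a tiny SVG for inline embedding.
# >>> svg = b'<svg xmlns="http://www.w3.org/2000/svg" width="1" height="1"/>'
# >>> make_svg(svg, "image/svg+xml")
# '<embed type="image/svg+xml" src="data:image/svg+xml;base64,..."/>'
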
def make_smooth(data, window_size):
    # Forward-looking moving average over up to window_size samples starting
    # at each point; the window shrinks toward the end of the series.
    ret = []
    for i, _ in enumerate(data):
        block = data[i : i + window_size]
        ret.append(sum(block) / len(block))
    return ret

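# Worked example (illustrative): make_smooth([0, 10, 20], 2) averages each
# forward window -> [5.0, 15.0, 20.0]; the last window holds a single sample.
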
def stackplot(data, names, title=None, color="#eee", unit=None, smooth=0):
    fig = io.BytesIO()
    with PL.style.context(mpl_style):
        t = list(map(datetime.fromtimestamp, data["t"]))
        labels = []
        values = []
        for k, v in names.items():
            # Smooth into a local copy instead of mutating the caller's data,
            # so repeated plots do not re-smooth the shared history.
            series = make_smooth(data[v], smooth) if smooth else data[v]
            values.append(series)
            labels.append(k)
        PL.stackplot(t, values, labels=labels)
        PL.legend()
        PL.grid(True, ls="--")
        PL.gcf().autofmt_xdate()
        PL.gca().margins(x=0)
        if title:
            PL.title(title)
        if unit:
            PL.gca().yaxis.set_major_formatter(EngFormatter(unit=unit))
        PL.tight_layout()
        PL.savefig(fig, format="svg", transparent=True)
        PL.clf()
    return make_svg(fig.getvalue(), "image/svg+xml")

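# Usage sketch (hypothetical data): stack download over upload speed from a
# history dict shaped like `hist` below.
# >>> stackplot({"t": [0, 60], "dl": [1e6, 2e6], "ul": [1e5, 2e5]},
# ...           {"Download": "dl", "Upload": "ul"}, "Speed", unit="b/s")
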
def lineplot(data, names, title=None, color="#eee", unit=None, smooth=0):
    fig = io.BytesIO()
    with PL.style.context(mpl_style):
        t = list(map(datetime.fromtimestamp, data["t"]))
        for k, v in names.items():
            # As in stackplot(), smooth a local copy rather than the input.
            series = make_smooth(data[v], smooth) if smooth else data[v]
            PL.plot(t, series, label=k)
        PL.legend()
        PL.grid(True, ls="--")
        PL.gcf().autofmt_xdate()
        PL.gca().margins(x=0)
        if title:
            PL.title(title)
        if unit:
            PL.gca().yaxis.set_major_formatter(EngFormatter(unit=unit))
        PL.tight_layout()
        PL.savefig(fig, format="svg", transparent=True)
        PL.clf()
    return make_svg(fig.getvalue(), "image/svg+xml")

def histogram(values, bins, title=None, color="#eee", unit=""):
    fig = io.BytesIO()
    with PL.style.context(mpl_style):
        PL.hist(values, bins=bins, log=True)
        if title:
            PL.title(title)
        PL.grid(True, ls="--")
        PL.gca().xaxis.set_major_formatter(EngFormatter(unit=unit))
        PL.gca().margins(x=0)
        PL.tight_layout()
        PL.savefig(fig, format="svg", transparent=True)
        PL.clf()
    return make_svg(fig.getvalue(), "image/svg+xml")

def prc_label(label, idx, values):
    # "{:.2%}" already renders the percent sign, so no extra "%" is needed.
    return "{} ({}, {:.2%})".format(label, values[idx], values[idx] / sum(values))

def byte_labels(label, idx, values):
    # Render the value as a binary-prefixed size, e.g. "1.50 GiB", plus the
    # slice's share of the total. Stop at the last prefix so the index never
    # runs past the end of the suffix list.
    orig_values = list(values)
    suffix = ["", "K", "M", "G", "T", "P", "E"]
    i = 0
    while values[idx] > 1024 and i < len(suffix) - 1:
        values[idx] /= 1024
        i += 1
    val = "{:.2f} {}iB".format(values[idx], suffix[i])
    return "{} ({}, {:.2%})".format(label, val, orig_values[idx] / sum(orig_values))

def byte_rate_labels(label, idx, values):
    # Same scaling as byte_labels(), but formatted as a rate ("GiB/s") and
    # without the percentage share.
    suffix = ["", "K", "M", "G", "T", "P", "E"]
    i = 0
    while values[idx] > 1024 and i < len(suffix) - 1:
        values[idx] /= 1024
        i += 1
    val = "{:.2f} {}iB/s".format(values[idx], suffix[i])
    return "{} ({})".format(label, val)

def piechart(items, title=None, labelfunc=prc_label, sort=True):
    fig = io.BytesIO()
    labels = []
    values = []
    colors = []
    if sort:
        # Order slices by value; otherwise fall back to alphabetical keys.
        items = sorted(items.items(), key=lambda v: v[1])
    else:
        items = sorted(items.items())
    for k, v in items:
        labels.append(k)
        if isinstance(v, tuple) and len(v) == 2:
            # A (value, color) pair pins an explicit color for this slice.
            v, c = v
            colors.append(c)
        values.append(v)
    colors = colors or None
    # Pass a copy of the values since label functions may mutate their input.
    for i, label in enumerate(labels):
        labels[i] = labelfunc(label, i, values[:])
    with PL.style.context(mpl_style):
        PL.pie(values, labels=labels, colors=colors, labeldistance=None)
        PL.legend()
        if title:
            PL.title(title)
        PL.tight_layout()
        PL.savefig(fig, format="svg", transparent=True)
        PL.clf()
    return make_svg(fig.getvalue(), "image/svg+xml")

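# Usage sketch (hypothetical counts): plain values use the default percentage
# labels; (value, color) tuples pin slice colors, as the availability charts
# below do.
# >>> piechart({"mkv": 12, "mp4": 3}, "Container formats")
# >>> piechart({"available": (9, "#5f5"), "missing": (1, "#f55")}, "Movies")
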
# Rolling history of qBittorrent metrics; persisted via stats.json so the
# plots survive restarts.
hist = {
    "t": [],
    "dl": [],
    "ul": [],
    "dl_size": [],
    "ul_size": [],
    "dl_size_sess": [],
    "ul_size_sess": [],
    "connections": [],
    "bw_per_conn": [],
    "dht_nodes": [],
}

def update_qbt_hist(stats, limit=1024):
    global hist
    data = stats["qbt"]["status"]
    hist["t"].append(time.time())
    hist["dl"].append(data["server_state"]["dl_info_speed"])
    hist["ul"].append(data["server_state"]["up_info_speed"])
    hist["dl_size"].append(data["server_state"]["alltime_dl"])
    hist["ul_size"].append(data["server_state"]["alltime_ul"])
    hist["dl_size_sess"].append(data["server_state"]["dl_info_data"])
    hist["ul_size_sess"].append(data["server_state"]["up_info_data"])
    hist["connections"].append(data["server_state"]["total_peer_connections"])
    hist["dht_nodes"].append(data["server_state"]["dht_nodes"])
    # Guard against a zero-division when there are no peer connections.
    connections = data["server_state"]["total_peer_connections"] or 1
    hist["bw_per_conn"].append(
        (data["server_state"]["dl_info_speed"] + data["server_state"]["up_info_speed"])
        / connections
    )
    # Cap the history length, then drop everything before the most recent gap
    # of more than an hour so stale samples do not distort the plots.
    for k in hist:
        hist[k] = hist[k][-limit:]
    last_idx = 0
    for i, (t1, t2) in enumerate(zip(hist["t"], hist["t"][1:])):
        if abs(t1 - t2) > (60 * 60):  # 1 h
            last_idx = i + 1
    for k in hist:
        hist[k] = hist[k][last_idx:]
    return hist

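# Illustrative trim behavior: with samples at t = [0, 10, 4000, 4010], the
# 3990 s gap (> 1 h) drops everything before it, keeping the last two points.
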
def collect_stats():
    PL.clf()
    cfg = handle_config()
    c = Client(cfg)
    series = {}
    movies = {}
    data = {
        "radarr": {"entries": c.radarr.movies(), "status": c.radarr.status()},
        "sonarr": {
            "entries": c.sonarr.series(),
            "status": c.sonarr.status(),
            "details": {},
        },
        "qbt": {"status": c.qbittorent.status()},
    }
    # Index shows and movies by id for quick lookup (the calendar needs it).
    for show in data["sonarr"]["entries"]:
        series[show["id"]] = show
    for movie in data["radarr"]["entries"]:
        movies[movie["id"]] = movie
    # Tally torrents by state and by category.
    torrent_states = {}
    torrent_categories = {}
    for torrent in data["qbt"]["status"]["torrents"].values():
        state = c.qbittorent.status_map.get(torrent["state"], (torrent["state"], None))[0]
        category = torrent["category"] or "<None>"
        torrent_states.setdefault(state, 0)
        torrent_categories.setdefault(category, 0)
        torrent_states[state] += 1
        torrent_categories[category] += 1
    # Per-file media statistics collected across both libraries.
    vbitrates = []
    abitrates = []
    acodecs = []
    vcodecs = []
    qualities = []
    formats = []
    sizes = {"Shows": 0, "Movies": 0}
    radarr_stats = {"missing": 0, "available": 0}
    for movie in data["radarr"]["entries"]:
        if movie["hasFile"]:
            radarr_stats["available"] += 1
        else:
            radarr_stats["missing"] += 1
        mfile = movie.get("movieFile", {})
        minfo = mfile.get("mediaInfo", {})
        sizes["Movies"] += mfile.get("size", 0)
        vbr = minfo.get("videoBitrate", None)
        abr = minfo.get("audioBitrate", None)
        acodec = minfo.get("audioCodec", None)
        vcodec = minfo.get("videoCodec", None)
        fmt = mfile.get("relativePath", "").split(".")[-1].lower()
        qual = mfile.get("quality", {}).get("quality", {}).get("name")
        if qual:
            qualities.append(qual)
        if acodec:
            acodecs.append(acodec)
        if vcodec:
            # Normalize the various H.264/H.265 spellings to one label each.
            if vcodec.lower() in ["x265", "h265", "hevc"]:
                vcodec = "H.265"
            if vcodec.lower() in ["x264", "h264"]:
                vcodec = "H.264"
            vcodecs.append(vcodec)
        if vbr:
            vbitrates.append(vbr)
        if abr:
            abitrates.append(abr)
        if fmt:
            formats.append(fmt)
    sonarr_stats = {"missing": 0, "available": 0}
    # Fetch per-series details concurrently. Map each future back to its show:
    # as_completed() yields futures in completion order, not submission order,
    # so zipping against the entries list would mismatch shows and results.
    with ThreadPoolExecutor(16) as pool:
        info_jobs = {
            pool.submit(c.sonarr.series, show["id"]): show
            for show in data["sonarr"]["entries"]
        }
        for job in as_completed(info_jobs):
            show = info_jobs[job]
            info = job.result()
            data["sonarr"]["details"][show["id"]] = info
            for file in info["episodeFile"]:
                vbr = file.get("mediaInfo", {}).get("videoBitrate", None)
                abr = file.get("mediaInfo", {}).get("audioBitrate", None)
                acodec = file.get("mediaInfo", {}).get("audioCodec", None)
                vcodec = file.get("mediaInfo", {}).get("videoCodec", None)
                fmt = file.get("relativePath", "").split(".")[-1].lower()
                qual = file.get("quality", {}).get("quality", {}).get("name")
                sizes["Shows"] += file.get("size", 0)
                if qual:
                    qualities.append(qual)
                if acodec:
                    acodecs.append(acodec)
                if vcodec:
                    if vcodec.lower() in ["x265", "h265", "hevc"]:
                        vcodec = "H.265"
                    if vcodec.lower() in ["x264", "h264"]:
                        vcodec = "H.264"
                    vcodecs.append(vcodec)
                if vbr:
                    vbitrates.append(vbr)
                if abr:
                    abitrates.append(abr)
                if fmt:
                    formats.append(fmt)
            for season in show.get("seasons", []):
                stats = season.get("statistics", {})
                sonarr_stats["missing"] += (
                    stats.get("totalEpisodeCount", 0) - stats.get("episodeFileCount", 0)
                )
                sonarr_stats["available"] += stats.get("episodeFileCount", 0)
    hist = update_qbt_hist(data)
    # Attach fixed slice colors: green for available, red for missing.
    sonarr_stats["available"] = (sonarr_stats["available"], "#5f5")
    sonarr_stats["missing"] = (sonarr_stats["missing"], "#f55")
    radarr_stats["available"] = (radarr_stats["available"], "#5f5")
    radarr_stats["missing"] = (radarr_stats["missing"], "#f55")
    imgs = [
        [
            "Media",
            histogram([vbitrates], "auto", "Video Bitrate", unit="b/s"),
            histogram([abitrates], "auto", "Audio Bitrate", unit="b/s"),
            piechart(dict(Counter(vcodecs)), "Video codecs"),
            piechart(dict(Counter(acodecs)), "Audio codecs"),
            piechart(dict(Counter(formats)), "Container formats"),
            piechart(dict(Counter(qualities)), "Quality"),
            piechart(sizes, "Disk usage", byte_labels),
            piechart(sonarr_stats, "Episodes"),
            piechart(radarr_stats, "Movies"),
        ],
        [
            "Torrents",
            piechart(torrent_states, "Torrents"),
            piechart(torrent_categories, "Torrent categories"),
            piechart(
                # "+ 0.0" coerces to float so byte_rate_labels can divide in place.
                {"Upload": hist["ul"][-1] + 0.0, "Download": hist["dl"][-1] + 0.0},
                "Bandwidth utilization",
                byte_rate_labels,
                sort=False,
            ),
            stackplot(
                hist,
                {"Download": "dl", "Upload": "ul"},
                "Transfer speed",
                unit="b/s",
                smooth=smoothness,
            ),
            stackplot(
                hist,
                {"Download": "dl_size_sess", "Upload": "ul_size_sess"},
                "Transfer volume (Session)",
                unit="b",
            ),
            stackplot(
                hist,
                {"Download": "dl_size", "Upload": "ul_size"},
                "Transfer volume (Total)",
                unit="b",
            ),
            lineplot(
                hist,
                {"Connections": "connections"},
                "Peers",
                unit=None,
                smooth=smoothness,
            ),
            lineplot(
                hist,
                {"Bandwidth per connection": "bw_per_conn"},
                "Connections",
                unit="b/s",
                smooth=smoothness,
            ),
            lineplot(hist, {"DHT Nodes": "dht_nodes"}, "DHT", unit=None),
        ],
    ]
    calendar = {"movies": [], "episodes": []}
    for movie in c.radarr.calendar():
        calendar["movies"].append(movie)
    for episode in c.sonarr.calendar():
        # airDateUtc is UTC (e.g. "2021-01-01T20:00:00Z"); strip the zone
        # marker and fractional seconds, then compare against UTC now to
        # decide whether the episode has aired.
        t = episode["airDateUtc"].rstrip("Z").split(".")[0]
        t = datetime.strptime(t, "%Y-%m-%dT%H:%M:%S")
        episode["hasAired"] = datetime.utcnow() > t
        calendar["episodes"].append(
            {"episode": episode, "series": series[episode["seriesId"]]}
        )
    return {"data": data, "images": imgs, "hist": hist, "calendar": calendar}

if os.path.isfile("stats.json"):
with open("stats.json", "r") as of:
try:
hist = json.load(of)["hist"]
except Exception as e:
print("Error loading history:", str(e))
def update():
    print("Updating...")
    try:
        stats = collect_stats()
    except Exception as e:
        print("Error collecting statistics:", str(e))
        stats = None
    if stats:
        # Write to a temporary file first and swap it in afterwards; the lock
        # file signals readers that stats.json is being replaced.
        with open("stats_temp.json", "w") as of:
            json.dump(stats, of)
        open("stats.lock", "w").close()
        if os.path.isfile("stats.json"):
            os.unlink("stats.json")
        os.rename("stats_temp.json", "stats.json")
        os.unlink("stats.lock")
    print("Done!")

def loop(seconds):
    while True:
        update()
        time.sleep(seconds)

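# For continuous collection, call e.g. loop(300) to refresh every five
# minutes (the interval is illustrative; the module runs once by default).
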
if __name__ == "__main__":
    update()