# MediaDash/stats_collect.py

import io
import os
import shutil
import threading
import time
from base64 import b64encode
from concurrent.futures import ThreadPoolExecutor
from datetime import datetime
from urllib.parse import quote

import pylab as PL
import ujson as json
from matplotlib.ticker import EngFormatter

from api import Client
from utils import handle_config

mpl_style = "dark_background"
smoothness = 5


def make_svg(data, dtype):
    """Wrap raw image bytes in an <embed> tag with a base64 data URI."""
    data_uri = "data:{};base64,{}".format(
        dtype, quote(str(b64encode(data), "ascii")))
    return '<embed type="image/svg+xml" src="{}"/>'.format(data_uri)
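
# Example: make_svg(b"<svg/>", "image/svg+xml") returns an <embed> whose src
# is "data:image/svg+xml;base64,PHN2Zy8%2B" (quote() percent-encodes the "+"
# characters that base64 can produce).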


def make_smooth(data, window_size):
    """Forward-looking moving average; the window shrinks near the end."""
    ret = []
    for i, _ in enumerate(data):
        block = data[i: i + window_size]
        ret.append(sum(block) / len(block))
    return ret
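
# Worked example: make_smooth([0, 10, 20], 2) averages each value with its
# successor, giving [5.0, 15.0, 20.0]; the final window truncates to one item.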


def stackplot(data, names, title=None, color="#eee", unit=None, smooth=0):
    """Render a stacked area chart of the named series as an inline SVG."""
    fig = io.BytesIO()
    with PL.style.context(mpl_style):
        # All series share the same time axis.
        t = list(map(datetime.fromtimestamp, data["t"]))
        labels = []
        values = []
        for k, v in names.items():
            series = data[v]
            if smooth:
                # Smooth a local copy: writing the result back into `data`
                # would permanently flatten the shared qbt_hist history.
                series = make_smooth(series, smooth)
            values.append(series)
            labels.append(k)
        PL.stackplot(t, values, labels=labels)
        PL.legend()
        PL.grid(True, ls="--")
        PL.gcf().autofmt_xdate()
        PL.gca().margins(x=0)
        if title:
            PL.title(title)
        if unit:
            PL.gca().yaxis.set_major_formatter(EngFormatter(unit=unit))
        PL.tight_layout()
        PL.savefig(fig, format="svg", transparent=True)
        PL.clf()
    return make_svg(fig.getvalue(), "image/svg+xml")


def lineplot(data, names, title=None, color="#eee", unit=None, smooth=0):
    """Render a line chart of the named series as an inline SVG."""
    fig = io.BytesIO()
    with PL.style.context(mpl_style):
        t = list(map(datetime.fromtimestamp, data["t"]))
        for k, v in names.items():
            series = data[v]
            if smooth:
                # As in stackplot(), smooth a copy rather than the shared
                # history in `data`.
                series = make_smooth(series, smooth)
            PL.plot(t, series, label=k)
        PL.legend()
        PL.grid(True, ls="--")
        PL.gcf().autofmt_xdate()
        PL.gca().margins(x=0)
        if title:
            PL.title(title)
        if unit:
            PL.gca().yaxis.set_major_formatter(EngFormatter(unit=unit))
        PL.tight_layout()
        PL.savefig(fig, format="svg", transparent=True)
        PL.clf()
    return make_svg(fig.getvalue(), "image/svg+xml")
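
# Both plot helpers render into an in-memory BytesIO "file" as SVG and return
# an inline <embed>. A minimal sketch with made-up history data:
#   hist = {"t": [0, 60, 120], "dl": [10, 20, 30], "ul": [1, 2, 3]}
#   svg = stackplot(hist, {"Download": "dl", "Upload": "ul"}, unit="b/s")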


def histogram(values, bins, title=None, color="#eee", unit=""):
    fig = io.BytesIO()
    with PL.style.context(mpl_style):
        PL.hist(values, bins=bins, log=True)
        if title:
            PL.title(title)
        PL.grid(True, ls="--")
        PL.gca().xaxis.set_major_formatter(EngFormatter(unit=unit))
        PL.gca().margins(x=0)
        PL.tight_layout()
        PL.savefig(fig, format="svg", transparent=True)
        PL.clf()
    return make_svg(fig.getvalue(), "image/svg+xml")


def prc_label(label, idx, values):
    # "{:.2%}" already appends the percent sign; no literal "%" needed.
    return "{} ({}, {:.2%})".format(
        label, values[idx], values[idx] / sum(values))


def byte_labels(label, idx, values):
    orig_values = list(values)
    suffix = ["", "K", "M", "G", "T", "P", "E"]
    i = 0
    # Stop at the last suffix so suffix[i] cannot run out of range.
    while values[idx] > 1024 and i < len(suffix) - 1:
        values[idx] /= 1024
        i += 1
    val = "{:.2f} {}iB".format(values[idx], suffix[i])
    return "{} ({}, {:.2%})".format(
        label, val, orig_values[idx] / sum(orig_values))


def byte_rate_labels(label, idx, values):
    suffix = ["", "K", "M", "G", "T", "P", "E"]
    i = 0
    while values[idx] > 1024 and i < len(suffix) - 1:
        values[idx] /= 1024
        i += 1
    val = "{:.2f} {}iB/s".format(values[idx], suffix[i])
    return "{} ({})".format(label, val)


def piechart(items, title=None, labelfunc=prc_label, sort=True):
    fig = io.BytesIO()
    labels = []
    values = []
    colors = []
    if sort:
        items = sorted(items.items(), key=lambda v: v[1])
    else:
        items = sorted(items.items())
    for k, v in items:
        labels.append(k)
        # A value may be a (value, color) pair; split the color off.
        if isinstance(v, tuple) and len(v) == 2:
            v, c = v
            colors.append(c)
        values.append(v)
    colors = colors or None
    for i, label in enumerate(labels):
        labels[i] = labelfunc(label, i, values[:])
    with PL.style.context(mpl_style):
        PL.pie(values, labels=labels, colors=colors, labeldistance=None)
        PL.legend()
        if title:
            PL.title(title)
        PL.tight_layout()
        PL.savefig(fig, format="svg", transparent=True)
        PL.clf()
    return make_svg(fig.getvalue(), "image/svg+xml")
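
# A minimal usage sketch with made-up counts: piechart({"avi": 3, "mkv": 9},
# "Container formats") labels the wedges "avi (3, 25.00%)" and
# "mkv (9, 75.00%)" via the default prc_label.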


qbt_hist = {
    "t": [],
    "dl": [],
    "ul": [],
    "dl_size": [],
    "ul_size": [],
    "dl_size_sess": [],
    "ul_size_sess": [],
    "connections": [],
    "bw_per_conn": [],
    "dht_nodes": [],
}
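
# The lists above are parallel arrays: index i of every list belongs to the
# snapshot taken at qbt_hist["t"][i], so update_qbt_hist() always trims all
# keys together.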


def update_qbt_hist(stats, limit=1024):
    global qbt_hist
    data = stats["qbt"]["status"]
    qbt_hist["t"].append(time.time())
    qbt_hist["dl"].append(data["server_state"]["dl_info_speed"])
    qbt_hist["ul"].append(data["server_state"]["up_info_speed"])
    qbt_hist["dl_size"].append(data["server_state"]["alltime_dl"])
    qbt_hist["ul_size"].append(data["server_state"]["alltime_ul"])
    qbt_hist["dl_size_sess"].append(data["server_state"]["dl_info_data"])
    qbt_hist["ul_size_sess"].append(data["server_state"]["up_info_data"])
    qbt_hist["connections"].append(
        data["server_state"]["total_peer_connections"])
    qbt_hist["dht_nodes"].append(data["server_state"]["dht_nodes"])
    # Guard against division by zero when there are no peer connections.
    qbt_hist["bw_per_conn"].append(
        (data["server_state"]["dl_info_speed"] +
         data["server_state"]["up_info_speed"]) /
        max(data["server_state"]["total_peer_connections"], 1))
    # Cap the history at `limit` samples.
    for k in qbt_hist:
        qbt_hist[k] = qbt_hist[k][-limit:]
    # Drop everything before the most recent gap of more than an hour so a
    # restart does not stitch two unrelated sessions together.
    last_idx = 0
    for i, (t1, t2) in enumerate(zip(qbt_hist["t"], qbt_hist["t"][1:])):
        if abs(t1 - t2) > (60 * 60):  # 1h
            last_idx = i + 1
    for k in qbt_hist:
        qbt_hist[k] = qbt_hist[k][last_idx:]
    return qbt_hist
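
# Trimming sketch with made-up timestamps: given limit=4 and
# t = [0, 10, 7210, 7220, 7230], the [-limit:] slice keeps the last four
# samples, and the >1h gap between 10 and 7210 then drops the lone leading
# sample, leaving only the current session.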


def qbt_stats():
    cfg = handle_config()
    c = Client(cfg)
    return {"status": c.qbittorent.status()}


def get_base_stats(pool):
    cfg = handle_config()
    client = Client(cfg)
    sonarr = {}
    radarr = {}
    qbt = {}
    jellyfin = {}
    sonarr["entries"] = pool.submit(client.sonarr.series)
    sonarr["status"] = pool.submit(client.sonarr.status)
    sonarr["calendar"] = pool.submit(client.sonarr.calendar)
    radarr["entries"] = pool.submit(client.radarr.movies)
    radarr["status"] = pool.submit(client.radarr.status)
    radarr["calendar"] = pool.submit(client.radarr.calendar)
    qbt["status"] = pool.submit(client.qbittorent.status)
    t_1 = datetime.today()
    jellyfin["library"] = pool.submit(client.jellyfin.get_library)
    # Resolve every future in place, labelling each timing with its service.
    for name, d in (("sonarr", sonarr), ("radarr", radarr),
                    ("qbt", qbt), ("jellyfin", jellyfin)):
        for k, v in d.items():
            if hasattr(v, "result"):
                d[k] = v.result()
                print("{}[{}]:".format(name, k), datetime.today() - t_1)
    sonarr["details"] = {}
    return {
        "sonarr": sonarr,
        "radarr": radarr,
        "qbt": qbt,
        "jellyfin": jellyfin}


def collect_stats(pool):
    from collections import Counter
    PL.clf()
    cfg = handle_config()
    c = Client(cfg)
    series = {}
    movies = {}
    data = get_base_stats(pool)
    for show in data["sonarr"]["entries"]:
        series[show["id"]] = show
    for movie in data["radarr"]["entries"]:
        movies[movie["id"]] = movie
    torrent_states = {}
    torrent_categories = {}
    for torrent in data["qbt"]["status"]["torrents"].values():
        state = c.qbittorent.status_map.get(
            torrent["state"], (torrent["state"], None))[0]
        category = torrent["category"] or "<None>"
        torrent_states.setdefault(state, 0)
        torrent_categories.setdefault(category, 0)
        torrent_states[state] += 1
        torrent_categories[category] += 1
    vbitrates = []
    abitrates = []
    acodecs = []
    vcodecs = []
    qualities = []
    formats = []
    sizes = {"Shows": 0, "Movies": 0}
    radarr_stats = {"missing": 0, "available": 0}
    for movie in data["radarr"]["entries"]:
        if movie["hasFile"]:
            radarr_stats["available"] += 1
        else:
            radarr_stats["missing"] += 1
        movie_file = movie.get("movieFile", {})
        media_info = movie_file.get("mediaInfo", {})
        sizes["Movies"] += movie_file.get("size", 0)
        vbr = media_info.get("videoBitrate")
        abr = media_info.get("audioBitrate")
        acodec = media_info.get("audioCodec")
        vcodec = media_info.get("videoCodec")
        fmt = movie_file.get("relativePath", "").split(".")[-1].lower()
        qual = movie_file.get("quality", {}).get("quality", {}).get("name")
        if qual:
            qualities.append(qual)
        if acodec:
            acodecs.append(acodec)
        if vcodec:
            # Fold the various encoder spellings into one codec name.
            if vcodec.lower() in ["x265", "h265", "hevc"]:
                vcodec = "H.265"
            if vcodec.lower() in ["x264", "h264"]:
                vcodec = "H.264"
            vcodecs.append(vcodec)
        if vbr:
            vbitrates.append(vbr)
        if abr:
            abitrates.append(abr)
        if fmt:
            formats.append(fmt)
    sonarr_stats = {"missing": 0, "available": 0}
    info_jobs = []
    for show in data["sonarr"]["entries"]:
        info_jobs.append(pool.submit(c.sonarr.series, show["id"]))
    t_1 = datetime.today()
    # Iterate in submission order: as_completed() yields futures in
    # completion order, which would pair results with the wrong show.
    for job, show in zip(info_jobs, data["sonarr"]["entries"]):
        info = job.result()
        data["sonarr"]["details"][show["id"]] = info
        for file in info["episodeFile"]:
            media_info = file.get("mediaInfo", {})
            vbr = media_info.get("videoBitrate")
            abr = media_info.get("audioBitrate")
            acodec = media_info.get("audioCodec")
            vcodec = media_info.get("videoCodec")
            fmt = file.get("relativePath", "").split(".")[-1].lower()
            qual = file.get("quality", {}).get("quality", {}).get("name")
            sizes["Shows"] += file.get("size", 0)
            if qual:
                qualities.append(qual)
            if acodec:
                acodecs.append(acodec)
            if vcodec:
                if vcodec.lower() in ["x265", "h265", "hevc"]:
                    vcodec = "H.265"
                if vcodec.lower() in ["x264", "h264"]:
                    vcodec = "H.264"
                vcodecs.append(vcodec)
            if vbr:
                vbitrates.append(vbr)
            if abr:
                abitrates.append(abr)
            if fmt:
                formats.append(fmt)
        for season in show.get("seasons", []):
            stats = season.get("statistics", {})
            sonarr_stats["missing"] += (
                stats.get("totalEpisodeCount", 0)
                - stats.get("episodeFileCount", 0)
            )
            sonarr_stats["available"] += stats.get("episodeFileCount", 0)
    print("Sonarr:", datetime.today() - t_1)
    qbt_hist = update_qbt_hist(data)
    calendar = {"movies": [], "episodes": []}
    for movie in data.get("radarr", {}).pop("calendar", []):
        calendar["movies"].append(movie)
    for episode in data.get("sonarr", {}).pop("calendar", []):
        # Drop the trailing "Z" and fractional seconds before parsing.
        t = episode["airDateUtc"].rstrip("Z").split(".")[0]
        t = datetime.strptime(t, "%Y-%m-%dT%H:%M:%S")
        # Compare in UTC: airDateUtc is a UTC timestamp, so a naive local
        # "today" would be off by the timezone offset.
        episode["hasAired"] = datetime.utcnow() > t
        details = c.sonarr.details(episode["id"])
        calendar["episodes"].append(
            {
                "episode": episode,
                "details": details,
                "series": series[episode["seriesId"]],
            }
        )
    library = data.pop("jellyfin", {}).pop("library", None)
    # Attach wedge colors: green for available, red for missing.
    sonarr_stats["available"] = (sonarr_stats["available"], "#5f5")
    sonarr_stats["missing"] = (sonarr_stats["missing"], "#f55")
    radarr_stats["available"] = (radarr_stats["available"], "#5f5")
    radarr_stats["missing"] = (radarr_stats["missing"], "#f55")
    t_1 = datetime.today()
    imgs = [
        [
            "Media",
            histogram([vbitrates], "auto", "Video Bitrate", unit="b/s"),
            histogram([abitrates], "auto", "Audio Bitrate", unit="b/s"),
            piechart(dict(Counter(vcodecs)), "Video codecs"),
            piechart(dict(Counter(acodecs)), "Audio codecs"),
            piechart(dict(Counter(formats)), "Container formats"),
            piechart(dict(Counter(qualities)), "Quality"),
            piechart(sizes, "Disk usage", byte_labels),
            piechart(sonarr_stats, "Episodes"),
            piechart(radarr_stats, "Movies"),
        ],
        [
            "Torrents",
            piechart(torrent_states, "Torrents"),
            piechart(torrent_categories, "Torrent categories"),
            piechart(
                {
                    "Upload": qbt_hist["ul"][-1] + 0.0,
                    "Download": qbt_hist["dl"][-1] + 0.0,
                },
                "Bandwidth utilization",
                byte_rate_labels,
                sort=False,
            ),
            stackplot(
                qbt_hist,
                {"Download": "dl", "Upload": "ul"},
                "Transfer speed",
                unit="b/s",
                smooth=smoothness,
            ),
            stackplot(
                qbt_hist,
                {"Download": "dl_size_sess", "Upload": "ul_size_sess"},
                "Transfer volume (Session)",
                unit="b",
            ),
            stackplot(
                qbt_hist,
                {"Download": "dl_size", "Upload": "ul_size"},
                "Transfer volume (Total)",
                unit="b",
            ),
            lineplot(
                qbt_hist,
                {"Connections": "connections"},
                "Peers",
                unit=None,
                smooth=smoothness,
            ),
            lineplot(
                qbt_hist,
                {"Bandwidth per connection": "bw_per_conn"},
                "Connections",
                unit="b/s",
                smooth=smoothness,
            ),
            lineplot(qbt_hist, {"DHT Nodes": "dht_nodes"}, "DHT", unit=None),
        ],
    ]
    print("Diagrams:", datetime.today() - t_1)
    return {
        "data": data,
        "images": imgs,
        "qbt_hist": qbt_hist,
        "calendar": calendar,
        "library": library,
    }


def update():
    try:
        with ThreadPoolExecutor(16) as pool:
            stats = collect_stats(pool)
    except Exception as e:
        print("Error collecting statistics:", e)
        stats = None
    if stats:
        for k, v in stats.items():
            # Write to a temp file and rename it over the target: the move
            # is atomic on the same filesystem, so readers never observe a
            # half-written JSON file.
            with open("stats/{}_temp.json".format(k), "w") as of:
                json.dump(v, of)
            shutil.move(
                "stats/{}_temp.json".format(k),
                "stats/{}.json".format(k))
    print("Done!")


def loop(seconds):
    t_start = time.time()
    print("Updating")
    update()
    dt = time.time() - t_start
    print("Next update in", seconds - dt)
    # Clamp the delay at zero so an update that overruns the interval
    # re-fires immediately instead of passing Timer a negative delay.
    t = threading.Timer(max(seconds - dt, 0), loop, (seconds,))
    t.start()
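
# Usage sketch: loop(300) runs update() immediately, then re-arms a Timer so
# each run starts roughly `seconds` after the previous one began.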


class Stats(object):
    def __init__(self):
        self.override = {}

    def __setitem__(self, key, value):
        # Only accept overrides for stats that already exist on disk.
        if os.path.isfile("stats/{}.json".format(key)):
            self.override[key] = value

    def __getitem__(self, key):
        try:
            with open("stats/{}.json".format(key)) as fh:
                if key in self.override:
                    return self.override[key]
                return json.load(fh)
        except Exception as e:
            print("Error opening stats file:", key, e)
            return []
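
# Usage sketch: Stats() is a read-through view over the JSON files written by
# update(). Stats()["qbt_hist"] loads stats/qbt_hist.json, while assigning
# stats["qbt_hist"] = {...} shadows the file for subsequent reads.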


if __name__ == "__main__":
    update()