code time

This commit is contained in:
cere 2024-02-21 00:35:31 -05:00
parent 789a5e0b02
commit ae28da8d60
153 changed files with 56768 additions and 1 deletions

View file

@ -0,0 +1,4 @@
# Bootstrap: make this addon's bundled dependencies importable by
# prepending the addon's own directory to sys.path (position 1, right
# after the script directory at position 0).
import os
import sys
# NOTE(review): os.path.join() with a single argument is a no-op wrapper;
# presumably a subdirectory (e.g. "resources/lib/deps") was meant — confirm.
sys.path.insert(1, os.path.join(os.path.dirname(__file__)))

4417
resources/lib/deps/bottle.py Executable file

File diff suppressed because it is too large Load diff

View file

@ -0,0 +1,307 @@
#!/usr/bin/python
# -*- coding: utf-8 -*-
'''provides a simple stateless caching system for Kodi addons and plugins'''
import sys
import xbmcvfs
import xbmcgui
import xbmc
import xbmcaddon
import datetime
import time
import sqlite3
import json
from functools import reduce
class SimpleCache(object):
    '''simple stateless caching system for Kodi'''
    # Class-level defaults; callers may override them per instance.
    enable_mem_cache = True        # mirror cache entries in window properties
    data_is_json = False           # serialize all payloads as JSON instead of repr()
    global_checksum = None         # optional value mixed into every checksum
    _exit = False                  # set by close() so running tasks bail out
    _auto_clean_interval = datetime.timedelta(hours=4)
    _win = None
    # NOTE(review): _busy_tasks is a *class* attribute, shared by every
    # SimpleCache instance in the process — presumably intentional so that
    # close() waits for all outstanding tasks; confirm before changing.
    _busy_tasks = []
    _database = None

    def __init__(self, addon_id):
        '''Initialize our caching class.

        addon_id: Kodi addon id, used to locate the addon profile folder
        where the sqlite database file is stored.
        '''
        self.addon_id = addon_id
        # Window 10000 (the Kodi home window) properties act as a
        # process-wide memory cache shared across stateless invocations.
        self._win = xbmcgui.Window(10000)
        self._monitor = xbmc.Monitor()
        self.check_cleanup()
        self._log_msg("Initialized")

    def close(self):
        '''tell any tasks to stop immediately (as we can be called multithreaded) and cleanup objects'''
        self._exit = True
        # wait for all tasks to complete
        while self._busy_tasks and not self._monitor.abortRequested():
            xbmc.sleep(25)
        del self._win
        del self._monitor
        self._log_msg("Closed")

    def __del__(self):
        '''make sure close is called'''
        if not self._exit:
            self.close()

    def get(self, endpoint, checksum="", json_data=False):
        '''
        get object from cache and return the results

        endpoint: the (unique) name of the cache object as reference
        checksum: optional argument to check if the checksum in the
                  cacheobject matches the checksum provided
        json_data: set True when the payload was stored as JSON

        Returns the cached data, or None on miss/expiry/checksum mismatch.
        '''
        checksum = self._get_checksum(checksum)
        cur_time = self._get_timestamp(datetime.datetime.now())
        result = None
        # 1: try memory cache first
        if self.enable_mem_cache:
            result = self._get_mem_cache(endpoint, checksum, cur_time, json_data)
        # 2: fallback to _database cache
        if result is None:
            result = self._get_db_cache(endpoint, checksum, cur_time, json_data)
        return result

    def set(self, endpoint, data, checksum="", expiration=datetime.timedelta(days=30), json_data=False):
        '''
        set data in cache

        endpoint: unique name for the cache entry
        data: the payload to store
        checksum: optional checksum to validate on retrieval
        expiration: timedelta until the entry expires (default 30 days)
        json_data: store the payload as JSON instead of repr()
        '''
        task_name = "set.%s" % endpoint
        # register the task so close() can wait for it
        self._busy_tasks.append(task_name)
        checksum = self._get_checksum(checksum)
        expires = self._get_timestamp(datetime.datetime.now() + expiration)
        # memory cache: write to window property
        if self.enable_mem_cache and not self._exit:
            self._set_mem_cache(endpoint, checksum, expires, data, json_data)
        # db cache
        if not self._exit:
            self._set_db_cache(endpoint, checksum, expires, data, json_data)
        # remove this task from list
        self._busy_tasks.remove(task_name)

    def check_cleanup(self):
        '''check if cleanup is needed - public method, may be called by calling addon'''
        cur_time = datetime.datetime.now()
        lastexecuted = self._win.getProperty("simplecache.clean.lastexecuted")
        if not lastexecuted:
            self._win.setProperty("simplecache.clean.lastexecuted", repr(cur_time))
        elif (eval(lastexecuted) + self._auto_clean_interval) < cur_time:
            # NOTE: eval() rebuilds the datetime from its repr(); the property
            # is only written by this class, but eval on a window property is
            # still risky — consider isoformat round-tripping instead.
            # cleanup needed...
            self._do_cleanup()

    def _get_mem_cache(self, endpoint, checksum, cur_time, json_data):
        '''
        get cache data from memory cache
        we use window properties because we need to be stateless
        '''
        result = None
        cachedata = self._win.getProperty(endpoint)
        if cachedata:
            if json_data or self.data_is_json:
                cachedata = json.loads(cachedata)
            else:
                # NOTE: eval() deserializes repr()-encoded tuples written by
                # _set_mem_cache; do not store untrusted data in this cache.
                cachedata = eval(cachedata)
            # cachedata is (expires, data, checksum)
            if cachedata[0] > cur_time:
                if not checksum or checksum == cachedata[2]:
                    result = cachedata[1]
        return result

    def _set_mem_cache(self, endpoint, checksum, expires, data, json_data):
        '''
        window property cache as alternative for memory cache
        useful for (stateless) plugins
        '''
        cachedata = (expires, data, checksum)
        if json_data or self.data_is_json:
            cachedata_str = json.dumps(cachedata)
        else:
            cachedata_str = repr(cachedata)
        self._win.setProperty(endpoint, cachedata_str)

    def _get_db_cache(self, endpoint, checksum, cur_time, json_data):
        '''get cache data from sqlite _database'''
        result = None
        query = "SELECT expires, data, checksum FROM simplecache WHERE id = ?"
        cache_data = self._execute_sql(query, (endpoint,))
        if cache_data:
            cache_data = cache_data.fetchone()
            if cache_data and cache_data[0] > cur_time:
                if not checksum or cache_data[2] == checksum:
                    if json_data or self.data_is_json:
                        result = json.loads(cache_data[1])
                    else:
                        # NOTE: eval() deserializes repr()-encoded payloads
                        # written by _set_db_cache — see _get_mem_cache.
                        result = eval(cache_data[1])
                    # also set result in memory cache for further access
                    if self.enable_mem_cache:
                        self._set_mem_cache(endpoint, checksum, cache_data[0], result, json_data)
        return result

    def _set_db_cache(self, endpoint, checksum, expires, data, json_data):
        '''store cache data in _database'''
        query = "INSERT OR REPLACE INTO simplecache( id, expires, data, checksum) VALUES (?, ?, ?, ?)"
        if json_data or self.data_is_json:
            data = json.dumps(data)
        else:
            data = repr(data)
        self._execute_sql(query, (endpoint, expires, data, checksum))

    def _do_cleanup(self):
        '''perform cleanup task'''
        if self._exit or self._monitor.abortRequested():
            return
        # Guard against concurrent cleanups BEFORE registering the task, so
        # an early return cannot leave a stale entry in _busy_tasks.
        if self._win.getProperty("simplecachecleanbusy"):
            return
        self._busy_tasks.append(__name__)
        self._win.setProperty("simplecachecleanbusy", "busy")
        cur_time = datetime.datetime.now()
        cur_timestamp = self._get_timestamp(cur_time)
        self._log_msg("Running cleanup...")
        try:
            query = "SELECT id, expires FROM simplecache"
            for cache_data in self._execute_sql(query).fetchall():
                cache_id = cache_data[0]
                cache_expires = cache_data[1]
                if self._exit or self._monitor.abortRequested():
                    return
                # always cleanup all memory objects on each interval
                self._win.clearProperty(cache_id)
                # clean up db cache object only if expired
                if cache_expires < cur_timestamp:
                    query = 'DELETE FROM simplecache WHERE id = ?'
                    self._execute_sql(query, (cache_id,))
                    self._log_msg("delete from db %s" % cache_id)
            # compact db
            self._execute_sql("VACUUM")
            self._win.setProperty("simplecache.clean.lastexecuted", repr(cur_time))
            self._log_msg("Auto cleanup done")
        finally:
            # BUGFIX: always release the busy flag and task registration.
            # The original returned early on abort and left both set, which
            # blocked close() forever and prevented any future cleanup run.
            self._busy_tasks.remove(__name__)
            self._win.clearProperty("simplecachecleanbusy")

    def _get_database(self):
        '''get reference to our sqlite _database - performs basic integrity check'''
        addon = xbmcaddon.Addon(self.addon_id)
        dbpath = addon.getAddonInfo('profile')
        dbfile = xbmcvfs.translatePath("%s/simplecache.db" % dbpath)
        if not xbmcvfs.exists(dbpath):
            xbmcvfs.mkdirs(dbpath)
        del addon
        try:
            connection = sqlite3.connect(dbfile, timeout=30, isolation_level=None)
            connection.execute('SELECT * FROM simplecache LIMIT 1')
            return connection
        except Exception as error:
            # our _database is corrupt or doesn't exist yet, we simply try to recreate it
            if xbmcvfs.exists(dbfile):
                xbmcvfs.delete(dbfile)
            try:
                connection = sqlite3.connect(dbfile, timeout=30, isolation_level=None)
                connection.execute(
                    """CREATE TABLE IF NOT EXISTS simplecache(
                    id TEXT UNIQUE, expires INTEGER, data TEXT, checksum INTEGER)""")
                return connection
            except Exception as error:
                self._log_msg("Exception while initializing _database: %s" % str(error), xbmc.LOGWARNING)
                self.close()
                return None

    def _execute_sql(self, query, data=None):
        '''little wrapper around execute and executemany to just retry a db command if db is locked'''
        retries = 0
        result = None
        last_error = None
        # always use new db object because we need to be sure that data is
        # available for other simplecache instances
        _database = self._get_database()
        if _database is None:
            # BUGFIX: _get_database() returns None on failure; the original
            # passed it straight to `with` and crashed with AttributeError.
            return None
        with _database:
            while not retries == 10 and not self._monitor.abortRequested():
                if self._exit:
                    return None
                try:
                    if isinstance(data, list):
                        result = _database.executemany(query, data)
                    elif data:
                        result = _database.execute(query, data)
                    else:
                        result = _database.execute(query)
                    return result
                except sqlite3.OperationalError as exc:
                    # BUGFIX: the original tested `"_database is locked" in
                    # error` — membership on the exception object raises
                    # TypeError in Python 3, and sqlite's message is
                    # "database is locked" anyway, so retries never happened.
                    last_error = exc
                    if "database is locked" in str(exc):
                        self._log_msg("retrying DB commit...")
                        retries += 1
                        self._monitor.waitForAbort(0.5)
                    else:
                        break
                except Exception as exc:
                    # BUGFIX: capture into last_error — `except ... as error`
                    # unbinds the name after the clause in Python 3, so the
                    # log line below raised NameError in the original.
                    last_error = exc
                    break
        self._log_msg("_database ERROR ! -- %s" % str(last_error), xbmc.LOGWARNING)
        return None

    @staticmethod
    def _log_msg(msg, loglevel=xbmc.LOGDEBUG):
        '''helper to send a message to the kodi log'''
        xbmc.log("Skin Helper Simplecache --> %s" % msg, level=loglevel)

    @staticmethod
    def _get_timestamp(date_time):
        '''Converts a datetime object to unix timestamp'''
        return int(time.mktime(date_time.timetuple()))

    def _get_checksum(self, stringinput):
        '''get int checksum from string (sum of character ordinals)'''
        if not stringinput and not self.global_checksum:
            return 0
        if self.global_checksum:
            stringinput = "%s-%s" % (self.global_checksum, stringinput)
        else:
            stringinput = str(stringinput)
        # sum() is the builtin equivalent of the original reduce(add, ...)
        return sum(map(ord, stringinput))
def use_cache(cache_days=14):
'''
wrapper around our simple cache to use as decorator
Usage: define an instance of SimpleCache with name "cache" (self.cache) in your class
Any method that needs caching just add @use_cache as decorator
NOTE: use unnamed arguments for calling the method and named arguments for optional settings
'''
def decorator(func):
'''our decorator'''
def decorated(*args, **kwargs):
'''process the original method and apply caching of the results'''
method_class = args[0]
method_class_name = method_class.__class__.__name__
cache_str = "%s.%s" % (method_class_name, func.__name__)
# cache identifier is based on positional args only
# named args are considered optional and ignored
for item in args[1:]:
cache_str += u".%s" % item
cache_str = cache_str.lower()
cachedata = method_class.cache.get(cache_str)
global_cache_ignore = False
try:
global_cache_ignore = method_class.ignore_cache
except Exception:
pass
if cachedata is not None and not kwargs.get("ignore_cache", False) and not global_cache_ignore:
return cachedata
else:
result = func(*args, **kwargs)
method_class.cache.set(cache_str, result, expiration=datetime.timedelta(days=cache_days))
return result
return decorated
return decorator

View file

@ -0,0 +1,5 @@
from .cache_handler import * # noqa
from .client import * # noqa
from .exceptions import * # noqa
from .oauth2 import * # noqa
from .util import * # noqa

View file

@ -0,0 +1,173 @@
# Public API of this module: the abstract CacheHandler plus its
# concrete implementations.
__all__ = [
    'CacheHandler',
    'CacheFileHandler',
    'DjangoSessionCacheHandler',
    'FlaskSessionCacheHandler',
    'MemoryCacheHandler']
import errno
import json
import logging
import os
from spotipy.util import CLIENT_CREDS_ENV_VARS
# Module-level logger, named after this module per logging convention.
logger = logging.getLogger(__name__)
class CacheHandler():
    """
    An abstraction layer for handling the caching and retrieval of
    authorization tokens.

    Custom extensions of this class must implement get_cached_token
    and save_token_to_cache methods with the same input and output
    structure as the CacheHandler class.
    """

    def get_cached_token(self):
        """
        Get and return a token_info dictionary object.

        Raises NotImplementedError: concrete subclasses must override.
        """
        # return token_info
        raise NotImplementedError()

    def save_token_to_cache(self, token_info):
        """
        Save a token_info dictionary object to the cache and return None.

        Raises NotImplementedError: concrete subclasses must override.
        """
        # BUGFIX: removed the unreachable `return None` that followed
        # this raise in the original.
        raise NotImplementedError()
class CacheFileHandler(CacheHandler):
    """
    Handles reading and writing cached Spotify authorization tokens
    as json files on disk.
    """

    def __init__(self,
                 cache_path=None,
                 username=None,
                 encoder_cls=None):
        """
        Parameters:
             * cache_path: May be supplied, will otherwise be generated
                           (takes precedence over `username`)
             * username: May be supplied or set as environment variable
                         (will set `cache_path` to `.cache-{username}`)
             * encoder_cls: May be supplied as a means of overwriting the
                            default serializer used for writing tokens to disk
        """
        self.encoder_cls = encoder_cls
        if cache_path:
            self.cache_path = cache_path
        else:
            cache_path = ".cache"
            # fall back to the username from the environment when one was
            # not passed in explicitly
            username = (username or os.getenv(CLIENT_CREDS_ENV_VARS["client_username"]))
            if username:
                cache_path += "-" + str(username)
            self.cache_path = cache_path

    def get_cached_token(self):
        """Read and return the cached token_info dict, or None on failure."""
        token_info = None
        try:
            # BUGFIX: use a context manager so the file handle is closed
            # even when read() or json.loads() raises — the original used
            # bare open()/close() and leaked the handle on error.
            with open(self.cache_path) as f:
                token_info_string = f.read()
            token_info = json.loads(token_info_string)
        except IOError as error:
            if error.errno == errno.ENOENT:
                # a missing cache file is expected on first run
                logger.debug("cache does not exist at: %s", self.cache_path)
            else:
                logger.warning("Couldn't read cache at: %s", self.cache_path)
        return token_info

    def save_token_to_cache(self, token_info):
        """Serialize token_info to JSON and write it to the cache file."""
        try:
            with open(self.cache_path, "w") as f:
                f.write(json.dumps(token_info, cls=self.encoder_cls))
        except IOError:
            logger.warning('Couldn\'t write token to cache at: %s',
                           self.cache_path)
class MemoryCacheHandler(CacheHandler):
    """
    A cache handler that simply stores the token info in memory as an
    instance attribute of this class. The token info will be lost when this
    instance is freed.
    """

    def __init__(self, token_info=None):
        """
        Parameters:
            * token_info: The token info to store in memory. Can be None.
        """
        self.token_info = token_info

    def get_cached_token(self):
        """Return the token info held in memory (None when nothing stored)."""
        return self.token_info

    def save_token_to_cache(self, token_info):
        """Replace the in-memory token info with the given dict."""
        self.token_info = token_info
class DjangoSessionCacheHandler(CacheHandler):
    """
    A cache handler that stores the token info in the session framework
    provided by Django.
    Read more at https://docs.djangoproject.com/en/3.2/topics/http/sessions/
    """

    def __init__(self, request):
        """
        Parameters:
            * request: HttpRequest object provided by Django for every
                       incoming request
        """
        self.request = request

    def get_cached_token(self):
        """Fetch the token info from the Django session, or None if absent."""
        try:
            return self.request.session['token_info']
        except KeyError:
            logger.debug("Token not found in the session")
            return None

    def save_token_to_cache(self, token_info):
        """Store the token info in the Django session."""
        try:
            self.request.session['token_info'] = token_info
        except Exception as e:
            logger.warning("Error saving token to cache: " + str(e))
class FlaskSessionCacheHandler(CacheHandler):
    """
    A cache handler that stores the token info in the session framework
    provided by flask.
    """

    def __init__(self, session):
        # keep a reference to the Flask session object supplied by the caller
        self.session = session

    def get_cached_token(self):
        """Fetch the token info from the Flask session, or None if absent."""
        try:
            return self.session["token_info"]
        except KeyError:
            logger.debug("Token not found in the session")
            return None

    def save_token_to_cache(self, token_info):
        """Store the token info in the Flask session."""
        try:
            self.session["token_info"] = token_info
        except Exception as e:
            logger.warning("Error saving token to cache: " + str(e))

File diff suppressed because it is too large Load diff

View file

@ -0,0 +1,16 @@
class SpotifyException(Exception):
    """Raised when a Spotify Web API request fails.

    Carries the HTTP status, an API error code, a human-readable message,
    an optional reason, and the response headers.
    """

    def __init__(self, http_status, code, msg, reason=None, headers=None):
        self.http_status = http_status
        self.code = code
        self.msg = msg
        self.reason = reason
        # `headers` is used to support `Retry-After` in the event of a
        # 429 status code.
        self.headers = {} if headers is None else headers

    def __str__(self):
        return 'http status: {0}, code:{1} - {2}, reason: {3}'.format(
            self.http_status, self.code, self.msg, self.reason)

File diff suppressed because it is too large Load diff

View file

@ -0,0 +1,135 @@
# -*- coding: utf-8 -*-
""" Shows a user's playlists (need to be authenticated via oauth) """
# NOTE(review): the module docstring above looks copied from an example
# script; this module actually provides shared utility helpers — confirm.
__all__ = ["CLIENT_CREDS_ENV_VARS", "prompt_for_user_token"]
import logging
import os
import warnings
import spotipy
# Module-level logger, named after this module per logging convention.
LOGGER = logging.getLogger(__name__)
# Environment variable names consulted when Spotify API credentials are
# not passed in explicitly.
CLIENT_CREDS_ENV_VARS = {
    "client_id": "SPOTIPY_CLIENT_ID",
    "client_secret": "SPOTIPY_CLIENT_SECRET",
    "client_username": "SPOTIPY_CLIENT_USERNAME",
    "redirect_uri": "SPOTIPY_REDIRECT_URI",
}
def prompt_for_user_token(
        username=None,
        scope=None,
        client_id=None,
        client_secret=None,
        redirect_uri=None,
        cache_path=None,
        oauth_manager=None,
        show_dialog=False
):
    """ prompts the user to login if necessary and returns
    the user token suitable for use with the spotipy.Spotify
    constructor

    Parameters:
     - username - the Spotify username (optional)
     - scope - the desired scope of the request (optional)
     - client_id - the client id of your app (required)
     - client_secret - the client secret of your app (required)
     - redirect_uri - the redirect URI of your app (required)
     - cache_path - path to location to save tokens (optional)
     - oauth_manager - Oauth manager object (optional)
     - show_dialog - If true, a login prompt always shows (optional, defaults to False)
    """
    # BUGFIX: the docstring above was a bare string expression placed AFTER
    # warnings.warn() in the original, making it dead code (the function had
    # no __doc__); it is now the first statement as required.
    warnings.warn(
        "'prompt_for_user_token' is deprecated."
        "Use the following instead: "
        " auth_manager=SpotifyOAuth(scope=scope)"
        " spotipy.Spotify(auth_manager=auth_manager)",
        DeprecationWarning
    )
    if not oauth_manager:
        # fill in any credentials missing from the call from the environment
        if not client_id:
            client_id = os.getenv("SPOTIPY_CLIENT_ID")
        if not client_secret:
            client_secret = os.getenv("SPOTIPY_CLIENT_SECRET")
        if not redirect_uri:
            redirect_uri = os.getenv("SPOTIPY_REDIRECT_URI")
        # NOTE(review): only client_id is validated here; a missing
        # client_secret or redirect_uri fails later inside SpotifyOAuth.
        if not client_id:
            LOGGER.warning(
                """
                You need to set your Spotify API credentials.
                You can do this by setting environment variables like so:
                export SPOTIPY_CLIENT_ID='your-spotify-client-id'
                export SPOTIPY_CLIENT_SECRET='your-spotify-client-secret'
                export SPOTIPY_REDIRECT_URI='your-app-redirect-url'
                Get your credentials at
                https://developer.spotify.com/my-applications
                """
            )
            raise spotipy.SpotifyException(550, -1, "no credentials set")
    sp_oauth = oauth_manager or spotipy.SpotifyOAuth(
        client_id,
        client_secret,
        redirect_uri,
        scope=scope,
        cache_path=cache_path,
        username=username,
        show_dialog=show_dialog
    )
    # try to get a valid token for this user, from the cache,
    # if not in the cache, then create a new (this will send
    # the user to a web page where they can authorize this app)
    token_info = sp_oauth.validate_token(sp_oauth.cache_handler.get_cached_token())
    if not token_info:
        code = sp_oauth.get_auth_response()
        token = sp_oauth.get_access_token(code, as_dict=False)
    else:
        return token_info["access_token"]
    # Auth'ed API request
    if token:
        return token
    else:
        return None
def get_host_port(netloc):
    """Split a network-location string into a (host, port) pair.

    The port is returned as an int, or None when *netloc* carries no
    ``:port`` suffix.
    """
    host, sep, tail = netloc.partition(":")
    if not sep:
        return netloc, None
    return host, int(tail)
def normalize_scope(scope):
    """Normalize *scope* to a sorted, space-separated string.

    Accepts a comma-separated string or a list/tuple of scopes; returns
    None for a falsy input. Raises Exception for unsupported types.
    """
    if not scope:
        return None
    if isinstance(scope, str):
        scopes = scope.split(',')
    elif isinstance(scope, (list, tuple)):
        scopes = scope
    else:
        raise Exception(
            "Unsupported scope value, please either provide a list of scopes, "
            "or a string of scopes separated by commas"
        )
    return " ".join(sorted(scopes))