Merge pull request #1636 from saltycrys/rework-logging

Rework logging
Perflyst 2021-01-06 16:09:32 +01:00 committed by GitHub
commit 168376b046
No known key found for this signature in database
GPG key ID: 4AEE18F83AFDEB23
14 changed files with 119 additions and 124 deletions

src/invidious.cr

@@ -106,34 +106,30 @@ LOCALES = {
YT_POOL = QUICPool.new(YT_URL, capacity: CONFIG.pool_size, timeout: 2.0)
-config = CONFIG
-output = STDOUT
-loglvl = LogLevel::Debug
+# CLI
Kemal.config.extra_options do |parser|
  parser.banner = "Usage: invidious [arguments]"
-  parser.on("-c THREADS", "--channel-threads=THREADS", "Number of threads for refreshing channels (default: #{config.channel_threads})") do |number|
+  parser.on("-c THREADS", "--channel-threads=THREADS", "Number of threads for refreshing channels (default: #{CONFIG.channel_threads})") do |number|
    begin
-      config.channel_threads = number.to_i
+      CONFIG.channel_threads = number.to_i
    rescue ex
      puts "THREADS must be integer"
      exit
    end
  end
-  parser.on("-f THREADS", "--feed-threads=THREADS", "Number of threads for refreshing feeds (default: #{config.feed_threads})") do |number|
+  parser.on("-f THREADS", "--feed-threads=THREADS", "Number of threads for refreshing feeds (default: #{CONFIG.feed_threads})") do |number|
    begin
-      config.feed_threads = number.to_i
+      CONFIG.feed_threads = number.to_i
    rescue ex
      puts "THREADS must be integer"
      exit
    end
  end
-  parser.on("-o OUTPUT", "--output=OUTPUT", "Redirect output (default: STDOUT)") do |output_arg|
-    FileUtils.mkdir_p(File.dirname(output_arg))
-    output = File.open(output_arg, mode: "a")
+  parser.on("-o OUTPUT", "--output=OUTPUT", "Redirect output (default: #{CONFIG.output})") do |output|
+    CONFIG.output = output
  end
-  parser.on("-l LEVEL", "--log-level=LEVEL", "Log level, one of #{LogLevel.values} (default: #{loglvl})") do |loglvl_arg|
-    loglvl = LogLevel.parse(loglvl_arg)
+  parser.on("-l LEVEL", "--log-level=LEVEL", "Log level, one of #{LogLevel.values} (default: #{CONFIG.log_level})") do |log_level|
+    CONFIG.log_level = LogLevel.parse(log_level)
  end
  parser.on("-v", "--version", "Print version") do
    puts SOFTWARE.to_pretty_json
@@ -143,35 +139,41 @@ end
Kemal::CLI.new ARGV
-logger = Invidious::LogHandler.new(output, loglvl)
+if CONFIG.output.upcase != "STDOUT"
+  FileUtils.mkdir_p(File.dirname(CONFIG.output))
+end
+OUTPUT = CONFIG.output.upcase == "STDOUT" ? STDOUT : File.open(CONFIG.output, mode: "a")
+LOGGER = Invidious::LogHandler.new(OUTPUT, CONFIG.log_level)
+config = CONFIG
# Check table integrity
if CONFIG.check_tables
-  check_enum(PG_DB, logger, "privacy", PlaylistPrivacy)
-  check_table(PG_DB, logger, "channels", InvidiousChannel)
-  check_table(PG_DB, logger, "channel_videos", ChannelVideo)
-  check_table(PG_DB, logger, "playlists", InvidiousPlaylist)
-  check_table(PG_DB, logger, "playlist_videos", PlaylistVideo)
-  check_table(PG_DB, logger, "nonces", Nonce)
-  check_table(PG_DB, logger, "session_ids", SessionId)
-  check_table(PG_DB, logger, "users", User)
-  check_table(PG_DB, logger, "videos", Video)
+  check_enum(PG_DB, "privacy", PlaylistPrivacy)
+  check_table(PG_DB, "channels", InvidiousChannel)
+  check_table(PG_DB, "channel_videos", ChannelVideo)
+  check_table(PG_DB, "playlists", InvidiousPlaylist)
+  check_table(PG_DB, "playlist_videos", PlaylistVideo)
+  check_table(PG_DB, "nonces", Nonce)
+  check_table(PG_DB, "session_ids", SessionId)
+  check_table(PG_DB, "users", User)
+  check_table(PG_DB, "videos", Video)
  if CONFIG.cache_annotations
-    check_table(PG_DB, logger, "annotations", Annotation)
+    check_table(PG_DB, "annotations", Annotation)
  end
end
# Start jobs
-Invidious::Jobs.register Invidious::Jobs::RefreshChannelsJob.new(PG_DB, logger, config)
-Invidious::Jobs.register Invidious::Jobs::RefreshFeedsJob.new(PG_DB, logger, config)
-Invidious::Jobs.register Invidious::Jobs::SubscribeToFeedsJob.new(PG_DB, logger, config, HMAC_KEY)
+Invidious::Jobs.register Invidious::Jobs::RefreshChannelsJob.new(PG_DB, config)
+Invidious::Jobs.register Invidious::Jobs::RefreshFeedsJob.new(PG_DB, config)
+Invidious::Jobs.register Invidious::Jobs::SubscribeToFeedsJob.new(PG_DB, config, HMAC_KEY)
DECRYPT_FUNCTION = DecryptFunction.new(CONFIG.decrypt_polling)
if config.decrypt_polling
-  Invidious::Jobs.register Invidious::Jobs::UpdateDecryptFunctionJob.new(logger)
+  Invidious::Jobs.register Invidious::Jobs::UpdateDecryptFunctionJob.new
end
if config.statistics_enabled
@@ -183,7 +185,7 @@ if config.popular_enabled
end
if config.captcha_key
-  Invidious::Jobs.register Invidious::Jobs::BypassCaptchaJob.new(logger, config)
+  Invidious::Jobs.register Invidious::Jobs::BypassCaptchaJob.new(config)
end
connection_channel = Channel({Bool, Channel(PQ::Notification)}).new(32)
@@ -261,7 +263,7 @@ before_all do |env|
    headers["Cookie"] = env.request.headers["Cookie"]
    begin
-      user, sid = get_user(sid, headers, PG_DB, logger, false)
+      user, sid = get_user(sid, headers, PG_DB, false)
      csrf_token = generate_response(sid, {
        ":authorize_token",
        ":playlist_ajax",
@@ -531,7 +533,7 @@ post "/subscription_ajax" do |env|
  case action
  when "action_create_subscription_to_channel"
    if !user.subscriptions.includes? channel_id
-      get_channel(channel_id, PG_DB, logger, false, false)
+      get_channel(channel_id, PG_DB, false, false)
      PG_DB.exec("UPDATE users SET feed_needs_update = true, subscriptions = array_append(subscriptions, $1) WHERE email = $2", channel_id, email)
    end
  when "action_remove_subscriptions"
@@ -566,7 +568,7 @@ get "/subscription_manager" do |env|
    headers = HTTP::Headers.new
    headers["Cookie"] = env.request.headers["Cookie"]
-    user, sid = get_user(sid, headers, PG_DB, logger)
+    user, sid = get_user(sid, headers, PG_DB)
  end
  action_takeout = env.params.query["action_takeout"]?.try &.to_i?
@@ -690,7 +692,7 @@ post "/data_control" do |env|
      user.subscriptions += body["subscriptions"].as_a.map { |a| a.as_s }
      user.subscriptions.uniq!
-      user.subscriptions = get_batch_channels(user.subscriptions, PG_DB, logger, false, false)
+      user.subscriptions = get_batch_channels(user.subscriptions, PG_DB, false, false)
      PG_DB.exec("UPDATE users SET feed_needs_update = true, subscriptions = $1 WHERE email = $2", user.subscriptions, user.email)
    end
@@ -759,7 +761,7 @@ post "/data_control" do |env|
      end
      user.subscriptions.uniq!
-      user.subscriptions = get_batch_channels(user.subscriptions, PG_DB, logger, false, false)
+      user.subscriptions = get_batch_channels(user.subscriptions, PG_DB, false, false)
      PG_DB.exec("UPDATE users SET feed_needs_update = true, subscriptions = $1 WHERE email = $2", user.subscriptions, user.email)
    when "import_freetube"
@@ -768,7 +770,7 @@ post "/data_control" do |env|
      end
      user.subscriptions.uniq!
-      user.subscriptions = get_batch_channels(user.subscriptions, PG_DB, logger, false, false)
+      user.subscriptions = get_batch_channels(user.subscriptions, PG_DB, false, false)
      PG_DB.exec("UPDATE users SET feed_needs_update = true, subscriptions = $1 WHERE email = $2", user.subscriptions, user.email)
    when "import_newpipe_subscriptions"
@@ -787,7 +789,7 @@ post "/data_control" do |env|
      end
      user.subscriptions.uniq!
-      user.subscriptions = get_batch_channels(user.subscriptions, PG_DB, logger, false, false)
+      user.subscriptions = get_batch_channels(user.subscriptions, PG_DB, false, false)
      PG_DB.exec("UPDATE users SET feed_needs_update = true, subscriptions = $1 WHERE email = $2", user.subscriptions, user.email)
    when "import_newpipe"
@@ -806,7 +808,7 @@ post "/data_control" do |env|
      user.subscriptions += db.query_all("SELECT url FROM subscriptions", as: String).map { |url| url.lchop("https://www.youtube.com/channel/") }
      user.subscriptions.uniq!
-      user.subscriptions = get_batch_channels(user.subscriptions, PG_DB, logger, false, false)
+      user.subscriptions = get_batch_channels(user.subscriptions, PG_DB, false, false)
      PG_DB.exec("UPDATE users SET feed_needs_update = true, subscriptions = $1 WHERE email = $2", user.subscriptions, user.email)
@@ -1209,7 +1211,7 @@ get "/feed/subscriptions" do |env|
    headers["Cookie"] = env.request.headers["Cookie"]
    if !user.password
-      user, sid = get_user(sid, headers, PG_DB, logger)
+      user, sid = get_user(sid, headers, PG_DB)
    end
    max_results = env.params.query["max_results"]?.try &.to_i?.try &.clamp(0, MAX_ITEMS_PER_PAGE)
@@ -1513,7 +1515,7 @@ post "/feed/webhook/:token" do |env|
  signature = env.request.headers["X-Hub-Signature"].lchop("sha1=")
  if signature != OpenSSL::HMAC.hexdigest(:sha1, HMAC_KEY, body)
-    logger.error("/feed/webhook/#{token} : Invalid signature")
+    LOGGER.error("/feed/webhook/#{token} : Invalid signature")
    env.response.status_code = 200
    next
  end
@@ -2831,7 +2833,7 @@ post "/api/v1/auth/subscriptions/:ucid" do |env|
  ucid = env.params.url["ucid"]
  if !user.subscriptions.includes? ucid
-    get_channel(ucid, PG_DB, logger, false, false)
+    get_channel(ucid, PG_DB, false, false)
    PG_DB.exec("UPDATE users SET feed_needs_update = true, subscriptions = array_append(subscriptions,$1) WHERE email = $2", ucid, user.email)
  end
@@ -3925,7 +3927,7 @@ add_context_storage_type(Array(String))
add_context_storage_type(Preferences)
add_context_storage_type(User)
-Kemal.config.logger = logger
+Kemal.config.logger = LOGGER
Kemal.config.host_binding = Kemal.config.host_binding != "0.0.0.0" ? Kemal.config.host_binding : CONFIG.host_binding
Kemal.config.port = Kemal.config.port != 3000 ? Kemal.config.port : CONFIG.port
Kemal.run
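
Note: with the log destination and level now stored on CONFIG, both can come from the config file or be overridden at startup. A hypothetical invocation using the flags defined above (the binary name is assumed):

    ./invidious --output=logs/invidious.log --log-level=Warn

The -o handler only assigns CONFIG.output; creating the directory and opening the file now happen once, where OUTPUT is defined.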

src/invidious/channels.cr

@@ -144,7 +144,7 @@ class ChannelRedirect < Exception
  end
end
-def get_batch_channels(channels, db, logger, refresh = false, pull_all_videos = true, max_threads = 10)
+def get_batch_channels(channels, db, refresh = false, pull_all_videos = true, max_threads = 10)
  finished_channel = Channel(String | Nil).new
  spawn do
@@ -160,7 +160,7 @@ def get_batch_channels(channels, db, logger, refresh = false, pull_all_videos =
      active_threads += 1
      spawn do
        begin
-          get_channel(ucid, db, logger, refresh, pull_all_videos)
+          get_channel(ucid, db, refresh, pull_all_videos)
          finished_channel.send(ucid)
        rescue ex
          finished_channel.send(nil)
@@ -181,10 +181,10 @@ def get_batch_channels(channels, db, logger, refresh = false, pull_all_videos =
  return final
end
-def get_channel(id, db, logger, refresh = true, pull_all_videos = true)
+def get_channel(id, db, refresh = true, pull_all_videos = true)
  if channel = db.query_one?("SELECT * FROM channels WHERE id = $1", id, as: InvidiousChannel)
    if refresh && Time.utc - channel.updated > 10.minutes
-      channel = fetch_channel(id, db, logger, pull_all_videos: pull_all_videos)
+      channel = fetch_channel(id, db, pull_all_videos: pull_all_videos)
      channel_array = channel.to_a
      args = arg_array(channel_array)
@@ -192,7 +192,7 @@ def get_channel(id, db, logger, refresh = true, pull_all_videos = true)
        ON CONFLICT (id) DO UPDATE SET author = $2, updated = $3", args: channel_array)
    end
  else
-    channel = fetch_channel(id, db, logger, pull_all_videos: pull_all_videos)
+    channel = fetch_channel(id, db, pull_all_videos: pull_all_videos)
    channel_array = channel.to_a
    args = arg_array(channel_array)
@@ -202,12 +202,13 @@ def get_channel(id, db, logger, refresh = true, pull_all_videos = true)
  return channel
end
-def fetch_channel(ucid, db, logger, pull_all_videos = true, locale = nil)
-  logger.trace("fetch_channel: #{ucid} : pull_all_videos = #{pull_all_videos}, locale = #{locale}")
+def fetch_channel(ucid, db, pull_all_videos = true, locale = nil)
+  LOGGER.debug("fetch_channel: #{ucid}")
+  LOGGER.trace("fetch_channel: #{ucid} : pull_all_videos = #{pull_all_videos}, locale = #{locale}")
-  logger.trace("fetch_channel: #{ucid} : Downloading RSS feed")
+  LOGGER.trace("fetch_channel: #{ucid} : Downloading RSS feed")
  rss = YT_POOL.client &.get("/feeds/videos.xml?channel_id=#{ucid}").body
-  logger.trace("fetch_channel: #{ucid} : Parsing RSS feed")
+  LOGGER.trace("fetch_channel: #{ucid} : Parsing RSS feed")
  rss = XML.parse_html(rss)
  author = rss.xpath_node(%q(//feed/title))
@@ -223,11 +224,11 @@ def fetch_channel(ucid, db, logger, pull_all_videos = true, locale = nil)
    auto_generated = true
  end
-  logger.trace("fetch_channel: #{ucid} : author = #{author}, auto_generated = #{auto_generated}")
+  LOGGER.trace("fetch_channel: #{ucid} : author = #{author}, auto_generated = #{auto_generated}")
  page = 1
-  logger.trace("fetch_channel: #{ucid} : Downloading channel videos page")
+  LOGGER.trace("fetch_channel: #{ucid} : Downloading channel videos page")
  response = get_channel_videos_response(ucid, page, auto_generated: auto_generated)
  videos = [] of SearchVideo
@@ -235,7 +236,7 @@ def fetch_channel(ucid, db, logger, pull_all_videos = true, locale = nil)
    initial_data = JSON.parse(response.body).as_a.find &.["response"]?
    raise InfoException.new("Could not extract channel JSON") if !initial_data
-    logger.trace("fetch_channel: #{ucid} : Extracting videos from channel videos page initial_data")
+    LOGGER.trace("fetch_channel: #{ucid} : Extracting videos from channel videos page initial_data")
    videos = extract_videos(initial_data.as_h, author, ucid)
  rescue ex
    if response.body.includes?("To continue with your YouTube experience, please fill out the form below.") ||
@@ -245,7 +246,7 @@ def fetch_channel(ucid, db, logger, pull_all_videos = true, locale = nil)
    raise ex
  end
-  logger.trace("fetch_channel: #{ucid} : Extracting videos from channel RSS feed")
+  LOGGER.trace("fetch_channel: #{ucid} : Extracting videos from channel RSS feed")
  rss.xpath_nodes("//feed/entry").each do |entry|
    video_id = entry.xpath_node("videoid").not_nil!.content
    title = entry.xpath_node("title").not_nil!.content
@@ -279,7 +280,7 @@ def fetch_channel(ucid, db, logger, pull_all_videos = true, locale = nil)
      views: views,
    })
-    logger.trace("fetch_channel: #{ucid} : video #{video_id} : Updating or inserting video")
+    LOGGER.trace("fetch_channel: #{ucid} : video #{video_id} : Updating or inserting video")
    # We don't include the 'premiere_timestamp' here because channel pages don't include them,
    # meaning the above timestamp is always null
@@ -289,11 +290,11 @@ def fetch_channel(ucid, db, logger, pull_all_videos = true, locale = nil)
      live_now = $8, views = $10 returning (xmax=0) as was_insert", *video.to_tuple, as: Bool)
    if was_insert
-      logger.trace("fetch_channel: #{ucid} : video #{video_id} : Inserted, updating subscriptions")
+      LOGGER.trace("fetch_channel: #{ucid} : video #{video_id} : Inserted, updating subscriptions")
      db.exec("UPDATE users SET notifications = array_append(notifications, $1), \
        feed_needs_update = true WHERE $2 = ANY(subscriptions)", video.id, video.ucid)
    else
-      logger.trace("fetch_channel: #{ucid} : video #{video_id} : Updated")
+      LOGGER.trace("fetch_channel: #{ucid} : video #{video_id} : Updated")
    end
  end
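
Note: call sites drop the logger argument entirely; a minimal sketch of the new calling convention (placeholder channel id, assuming an open PG_DB handle):

    channel = get_channel("UC_placeholder_id", PG_DB, refresh: true, pull_all_videos: false)

Progress reporting happens inside fetch_channel through the global LOGGER, with a one-line summary at debug level and step-by-step detail at trace.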

src/invidious/helpers/helpers.cr

@@ -64,11 +64,13 @@ end
class Config
  include YAML::Serializable
-  property channel_threads : Int32                 # Number of threads to use for crawling videos from channels (for updating subscriptions)
-  property feed_threads : Int32                    # Number of threads to use for updating feeds
+  property channel_threads : Int32 = 1             # Number of threads to use for crawling videos from channels (for updating subscriptions)
+  property feed_threads : Int32 = 1                # Number of threads to use for updating feeds
+  property output : String = "STDOUT"              # Log file path or STDOUT
+  property log_level : LogLevel = LogLevel::Info   # Default log level, valid YAML values are ints and strings, see src/invidious/helpers/logger.cr
  property db : DBConfig                           # Database configuration
  property decrypt_polling : Bool = true           # Use polling to keep decryption function up to date
-  property full_refresh : Bool                     # Used for crawling channels: threads should check all videos uploaded by a channel
+  property full_refresh : Bool = false             # Used for crawling channels: threads should check all videos uploaded by a channel
  property https_only : Bool?                      # Used to tell Invidious it is behind a proxy, so links to resources should be https://
  property hmac_key : String?                      # HMAC signing key for CSRF tokens and verifying pubsub subscriptions
  property domain : String?                        # Domain to be used for links to resources on the site where an absolute URL is required
@@ -334,11 +336,11 @@ def extract_items(initial_data : Hash(String, JSON::Any), author_fallback : Stri
  items
end
-def check_enum(db, logger, enum_name, struct_type = nil)
+def check_enum(db, enum_name, struct_type = nil)
  return # TODO
  if !db.query_one?("SELECT true FROM pg_type WHERE typname = $1", enum_name, as: Bool)
-    logger.info("check_enum: CREATE TYPE #{enum_name}")
+    LOGGER.info("check_enum: CREATE TYPE #{enum_name}")
    db.using_connection do |conn|
      conn.as(PG::Connection).exec_all(File.read("config/sql/#{enum_name}.sql"))
@@ -346,12 +348,12 @@ def check_enum(db, logger, enum_name, struct_type = nil)
  end
end
-def check_table(db, logger, table_name, struct_type = nil)
+def check_table(db, table_name, struct_type = nil)
  # Create table if it doesn't exist
  begin
    db.exec("SELECT * FROM #{table_name} LIMIT 0")
  rescue ex
-    logger.info("check_table: check_table: CREATE TABLE #{table_name}")
+    LOGGER.info("check_table: check_table: CREATE TABLE #{table_name}")
    db.using_connection do |conn|
      conn.as(PG::Connection).exec_all(File.read("config/sql/#{table_name}.sql"))
@@ -371,7 +373,7 @@ def check_table(db, logger, table_name, struct_type = nil)
    if name != column_array[i]?
      if !column_array[i]?
        new_column = column_types.select { |line| line.starts_with? name }[0]
-        logger.info("check_table: ALTER TABLE #{table_name} ADD COLUMN #{new_column}")
+        LOGGER.info("check_table: ALTER TABLE #{table_name} ADD COLUMN #{new_column}")
        db.exec("ALTER TABLE #{table_name} ADD COLUMN #{new_column}")
        next
      end
@@ -389,29 +391,29 @@ def check_table(db, logger, table_name, struct_type = nil)
        # There's a column we didn't expect
        if !new_column
-          logger.info("check_table: ALTER TABLE #{table_name} DROP COLUMN #{column_array[i]}")
+          LOGGER.info("check_table: ALTER TABLE #{table_name} DROP COLUMN #{column_array[i]}")
          db.exec("ALTER TABLE #{table_name} DROP COLUMN #{column_array[i]} CASCADE")
          column_array = get_column_array(db, table_name)
          next
        end
-        logger.info("check_table: ALTER TABLE #{table_name} ADD COLUMN #{new_column}")
+        LOGGER.info("check_table: ALTER TABLE #{table_name} ADD COLUMN #{new_column}")
        db.exec("ALTER TABLE #{table_name} ADD COLUMN #{new_column}")
-        logger.info("check_table: UPDATE #{table_name} SET #{column_array[i]}_new=#{column_array[i]}")
+        LOGGER.info("check_table: UPDATE #{table_name} SET #{column_array[i]}_new=#{column_array[i]}")
        db.exec("UPDATE #{table_name} SET #{column_array[i]}_new=#{column_array[i]}")
-        logger.info("check_table: ALTER TABLE #{table_name} DROP COLUMN #{column_array[i]} CASCADE")
+        LOGGER.info("check_table: ALTER TABLE #{table_name} DROP COLUMN #{column_array[i]} CASCADE")
        db.exec("ALTER TABLE #{table_name} DROP COLUMN #{column_array[i]} CASCADE")
-        logger.info("check_table: ALTER TABLE #{table_name} RENAME COLUMN #{column_array[i]}_new TO #{column_array[i]}")
+        LOGGER.info("check_table: ALTER TABLE #{table_name} RENAME COLUMN #{column_array[i]}_new TO #{column_array[i]}")
        db.exec("ALTER TABLE #{table_name} RENAME COLUMN #{column_array[i]}_new TO #{column_array[i]}")
        column_array = get_column_array(db, table_name)
      end
    else
-      logger.info("check_table: ALTER TABLE #{table_name} DROP COLUMN #{column_array[i]} CASCADE")
+      LOGGER.info("check_table: ALTER TABLE #{table_name} DROP COLUMN #{column_array[i]} CASCADE")
      db.exec("ALTER TABLE #{table_name} DROP COLUMN #{column_array[i]} CASCADE")
    end
  end
@@ -421,7 +423,7 @@ def check_table(db, logger, table_name, struct_type = nil)
  column_array.each do |column|
    if !struct_array.includes? column
-      logger.info("check_table: ALTER TABLE #{table_name} DROP COLUMN #{column} CASCADE")
+      LOGGER.info("check_table: ALTER TABLE #{table_name} DROP COLUMN #{column} CASCADE")
      db.exec("ALTER TABLE #{table_name} DROP COLUMN #{column} CASCADE")
    end
  end

src/invidious/helpers/logger.cr

@@ -1,14 +1,14 @@
require "logger"

enum LogLevel
-  All
-  Trace
-  Debug
-  Info
-  Warn
-  Error
-  Fatal
-  Off
+  All   = 0
+  Trace = 1
+  Debug = 2
+  Info  = 3
+  Warn  = 4
+  Error = 5
+  Fatal = 6
+  Off   = 7
end

class Invidious::LogHandler < Kemal::BaseLogHandler
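
Note: the explicit values let the log level be read from YAML, where the Config comment above notes that both ints and strings were valid with the compiler in use at the time. A self-contained sketch of that behavior, using a hypothetical Settings class in place of the real Config:

    require "yaml"

    enum LogLevel
      All   = 0
      Trace = 1
      Debug = 2
      Info  = 3
      Warn  = 4
      Error = 5
      Fatal = 6
      Off   = 7
    end

    class Settings
      include YAML::Serializable
      # Same defaults as the new Config properties.
      property output : String = "STDOUT"
      property log_level : LogLevel = LogLevel::Info
    end

    settings = Settings.from_yaml("log_level: Debug")
    puts settings.log_level # => Debug
    puts settings.output    # => STDOUT (default preserved)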

src/invidious/jobs/bypass_captcha_job.cr

@@ -1,8 +1,7 @@
class Invidious::Jobs::BypassCaptchaJob < Invidious::Jobs::BaseJob
-  private getter logger : Invidious::LogHandler
  private getter config : Config

-  def initialize(@logger, @config)
+  def initialize(@config)
  end

  def begin
@@ -127,7 +126,7 @@ class Invidious::Jobs::BypassCaptchaJob < Invidious::Jobs::BaseJob
          end
        end
      rescue ex
-        logger.error("BypassCaptchaJob: #{ex.message}")
+        LOGGER.error("BypassCaptchaJob: #{ex.message}")
      ensure
        sleep 1.minute
        Fiber.yield

src/invidious/jobs/refresh_channels_job.cr

@@ -1,9 +1,8 @@
class Invidious::Jobs::RefreshChannelsJob < Invidious::Jobs::BaseJob
  private getter db : DB::Database
-  private getter logger : Invidious::LogHandler
  private getter config : Config

-  def initialize(@db, @logger, @config)
+  def initialize(@db, @config)
  end

  def begin
@@ -14,37 +13,37 @@ class Invidious::Jobs::RefreshChannelsJob < Invidious::Jobs::BaseJob
    backoff = 1.seconds
    loop do
-      logger.debug("RefreshChannelsJob: Refreshing all channels")
+      LOGGER.debug("RefreshChannelsJob: Refreshing all channels")
      db.query("SELECT id FROM channels ORDER BY updated") do |rs|
        rs.each do
          id = rs.read(String)
          if active_fibers >= lim_fibers
-            logger.trace("RefreshChannelsJob: Fiber limit reached, waiting...")
+            LOGGER.trace("RefreshChannelsJob: Fiber limit reached, waiting...")
            if active_channel.receive
-              logger.trace("RefreshChannelsJob: Fiber limit ok, continuing")
+              LOGGER.trace("RefreshChannelsJob: Fiber limit ok, continuing")
              active_fibers -= 1
            end
          end
-          logger.trace("RefreshChannelsJob: #{id} : Spawning fiber")
+          LOGGER.debug("RefreshChannelsJob: #{id} : Spawning fiber")
          active_fibers += 1
          spawn do
            begin
-              logger.trace("RefreshChannelsJob: #{id} fiber : Fetching channel")
-              channel = fetch_channel(id, db, logger, config.full_refresh)
+              LOGGER.trace("RefreshChannelsJob: #{id} fiber : Fetching channel")
+              channel = fetch_channel(id, db, config.full_refresh)
              lim_fibers = max_fibers
-              logger.trace("RefreshChannelsJob: #{id} fiber : Updating DB")
+              LOGGER.trace("RefreshChannelsJob: #{id} fiber : Updating DB")
              db.exec("UPDATE channels SET updated = $1, author = $2, deleted = false WHERE id = $3", Time.utc, channel.author, id)
            rescue ex
-              logger.error("RefreshChannelsJob: #{id} : #{ex.message}")
+              LOGGER.error("RefreshChannelsJob: #{id} : #{ex.message}")
              if ex.message == "Deleted or invalid channel"
                db.exec("UPDATE channels SET updated = $1, deleted = true WHERE id = $2", Time.utc, id)
              else
                lim_fibers = 1
-                logger.error("RefreshChannelsJob: #{id} fiber : backing off for #{backoff}s")
+                LOGGER.error("RefreshChannelsJob: #{id} fiber : backing off for #{backoff}s")
                sleep backoff
                if backoff < 1.days
                  backoff += backoff
@@ -53,14 +52,14 @@ class Invidious::Jobs::RefreshChannelsJob < Invidious::Jobs::BaseJob
                end
              end
            ensure
-              logger.trace("RefreshChannelsJob: #{id} fiber : Done")
+              LOGGER.debug("RefreshChannelsJob: #{id} fiber : Done")
              active_channel.send(true)
            end
          end
        end
      end
-      logger.debug("RefreshChannelsJob: Done, sleeping for one minute")
+      LOGGER.debug("RefreshChannelsJob: Done, sleeping for one minute")
      sleep 1.minute
      Fiber.yield
    end
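
Note: on repeated failures the job serializes fetches (lim_fibers = 1) and doubles the delay each round until it reaches one day. A runnable sketch of the progression:

    backoff = 1.seconds
    5.times do
      puts backoff # => 00:00:01, 00:00:02, 00:00:04, 00:00:08, 00:00:16
      backoff += backoff if backoff < 1.days
    end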

src/invidious/jobs/refresh_feeds_job.cr

@@ -1,9 +1,8 @@
class Invidious::Jobs::RefreshFeedsJob < Invidious::Jobs::BaseJob
  private getter db : DB::Database
-  private getter logger : Invidious::LogHandler
  private getter config : Config

-  def initialize(@db, @logger, @config)
+  def initialize(@db, @config)
  end

  def begin
@@ -30,14 +29,14 @@ class Invidious::Jobs::RefreshFeedsJob < Invidious::Jobs::BaseJob
          column_array = get_column_array(db, view_name)
          ChannelVideo.type_array.each_with_index do |name, i|
            if name != column_array[i]?
-              logger.info("RefreshFeedsJob: DROP MATERIALIZED VIEW #{view_name}")
+              LOGGER.info("RefreshFeedsJob: DROP MATERIALIZED VIEW #{view_name}")
              db.exec("DROP MATERIALIZED VIEW #{view_name}")
              raise "view does not exist"
            end
          end
          if !db.query_one("SELECT pg_get_viewdef('#{view_name}')", as: String).includes? "WHERE ((cv.ucid = ANY (u.subscriptions))"
-            logger.info("RefreshFeedsJob: Materialized view #{view_name} is out-of-date, recreating...")
+            LOGGER.info("RefreshFeedsJob: Materialized view #{view_name} is out-of-date, recreating...")
            db.exec("DROP MATERIALIZED VIEW #{view_name}")
          end
@@ -49,18 +48,18 @@ class Invidious::Jobs::RefreshFeedsJob < Invidious::Jobs::BaseJob
            legacy_view_name = "subscriptions_#{sha256(email)[0..7]}"
            db.exec("SELECT * FROM #{legacy_view_name} LIMIT 0")
-            logger.info("RefreshFeedsJob: RENAME MATERIALIZED VIEW #{legacy_view_name}")
+            LOGGER.info("RefreshFeedsJob: RENAME MATERIALIZED VIEW #{legacy_view_name}")
            db.exec("ALTER MATERIALIZED VIEW #{legacy_view_name} RENAME TO #{view_name}")
          rescue ex
            begin
              # While iterating through, we may have an email stored from a deleted account
              if db.query_one?("SELECT true FROM users WHERE email = $1", email, as: Bool)
-                logger.info("RefreshFeedsJob: CREATE #{view_name}")
+                LOGGER.info("RefreshFeedsJob: CREATE #{view_name}")
                db.exec("CREATE MATERIALIZED VIEW #{view_name} AS #{MATERIALIZED_VIEW_SQL.call(email)}")
                db.exec("UPDATE users SET feed_needs_update = false WHERE email = $1", email)
              end
            rescue ex
-              logger.error("RefreshFeedJobs: REFRESH #{email} : #{ex.message}")
+              LOGGER.error("RefreshFeedJobs: REFRESH #{email} : #{ex.message}")
            end
          end
        end

src/invidious/jobs/subscribe_to_feeds_job.cr

@@ -1,10 +1,9 @@
class Invidious::Jobs::SubscribeToFeedsJob < Invidious::Jobs::BaseJob
  private getter db : DB::Database
-  private getter logger : Invidious::LogHandler
  private getter hmac_key : String
  private getter config : Config

-  def initialize(@db, @logger, @config, @hmac_key)
+  def initialize(@db, @config, @hmac_key)
  end

  def begin
@@ -34,10 +33,10 @@ class Invidious::Jobs::SubscribeToFeedsJob < Invidious::Jobs::BaseJob
          response = subscribe_pubsub(ucid, hmac_key, config)
          if response.status_code >= 400
-            logger.error("SubscribeToFeedsJob: #{ucid} : #{response.body}")
+            LOGGER.error("SubscribeToFeedsJob: #{ucid} : #{response.body}")
          end
        rescue ex
-          logger.error("SubscribeToFeedsJob: #{ucid} : #{ex.message}")
+          LOGGER.error("SubscribeToFeedsJob: #{ucid} : #{ex.message}")
        end
        active_channel.send(true)

src/invidious/jobs/update_decrypt_function_job.cr

@@ -1,15 +1,10 @@
class Invidious::Jobs::UpdateDecryptFunctionJob < Invidious::Jobs::BaseJob
-  private getter logger : Invidious::LogHandler
-
-  def initialize(@logger)
-  end
-
  def begin
    loop do
      begin
        DECRYPT_FUNCTION.update_decrypt_function
      rescue ex
-        logger.error("UpdateDecryptFunctionJob : #{ex.message}")
+        LOGGER.error("UpdateDecryptFunctionJob : #{ex.message}")
      ensure
        sleep 1.minute
        Fiber.yield

src/invidious/routes/base_route.cr

@@ -1,7 +1,6 @@
abstract class Invidious::Routes::BaseRoute
  private getter config : Config
-  private getter logger : Invidious::LogHandler

-  def initialize(@config, @logger)
+  def initialize(@config)
  end
end

src/invidious/routes/login.cr

@@ -267,7 +267,7 @@ class Invidious::Routes::Login < Invidious::Routes::BaseRoute
          raise "Couldn't get SID."
        end

-        user, sid = get_user(sid, headers, PG_DB, logger)
+        user, sid = get_user(sid, headers, PG_DB)

        # We are now logged in
        traceback << "done.<br/>"

src/invidious/routes/watch.cr

@@ -62,7 +62,7 @@ class Invidious::Routes::Watch < Invidious::Routes::BaseRoute
      rescue ex : VideoRedirect
        return env.redirect env.request.resource.gsub(id, ex.video_id)
      rescue ex
-        logger.error("get_video: #{id} : #{ex.message}")
+        LOGGER.error("get_video: #{id} : #{ex.message}")
        return error_template(500, ex)
      end

src/invidious/routing.cr

@@ -1,14 +1,14 @@
module Invidious::Routing
  macro get(path, controller, method = :handle)
    get {{ path }} do |env|
-      controller_instance = {{ controller }}.new(config, logger)
+      controller_instance = {{ controller }}.new(config)
      controller_instance.{{ method.id }}(env)
    end
  end

  macro post(path, controller, method = :handle)
    post {{ path }} do |env|
-      controller_instance = {{ controller }}.new(config, logger)
+      controller_instance = {{ controller }}.new(config)
      controller_instance.{{ method.id }}(env)
    end
  end
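
Note: the macros now capture only config from the surrounding scope. A hypothetical registration, matching the expansion above (path and controller are illustrative):

    Invidious::Routing.get "/watch", Invidious::Routes::Watch

This expands to a Kemal get "/watch" block that builds Invidious::Routes::Watch.new(config) and calls its handle method for each request.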

src/invidious/users.cr

@@ -269,12 +269,12 @@ struct Preferences
  end
end

-def get_user(sid, headers, db, logger, refresh = true)
+def get_user(sid, headers, db, refresh = true)
  if email = db.query_one?("SELECT email FROM session_ids WHERE id = $1", sid, as: String)
    user = db.query_one("SELECT * FROM users WHERE email = $1", email, as: User)

    if refresh && Time.utc - user.updated > 1.minute
-      user, sid = fetch_user(sid, headers, db, logger)
+      user, sid = fetch_user(sid, headers, db)
      user_array = user.to_a
      user_array[4] = user_array[4].to_json # User preferences
      args = arg_array(user_array)
@@ -292,7 +292,7 @@ def get_user(sid, headers, db, logger, refresh = true)
      end
    end
  else
-    user, sid = fetch_user(sid, headers, db, logger)
+    user, sid = fetch_user(sid, headers, db)
    user_array = user.to_a
    user_array[4] = user_array[4].to_json # User preferences
    args = arg_array(user.to_a)
@@ -313,7 +313,7 @@ def get_user(sid, headers, db, logger, refresh = true)
  return user, sid
end

-def fetch_user(sid, headers, db, logger)
+def fetch_user(sid, headers, db)
  feed = YT_POOL.client &.get("/subscription_manager?disable_polymer=1", headers)
  feed = XML.parse_html(feed.body)
@@ -326,7 +326,7 @@ def fetch_user(sid, headers, db, logger)
    end
  end

-  channels = get_batch_channels(channels, db, logger, false, false)
+  channels = get_batch_channels(channels, db, false, false)

  email = feed.xpath_node(%q(//a[@class="yt-masthead-picker-header yt-masthead-picker-active-account"]))
  if email