Make logger a constant

Instead of passing `logger` around, there is now the global constant `LOGGER`.
saltycrys 2021-01-04 16:51:06 +01:00
parent 7a8620a570
commit 6365ee7487
13 changed files with 103 additions and 115 deletions
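
The `LOGGER` constant itself is defined once at startup, in a file not shown on this page. A minimal sketch of the idea in Crystal, assuming the existing `Invidious::LogHandler`; the constructor arguments here are illustrative assumptions, not taken from this commit:

    # Assumed sketch: build the log handler once at boot and expose it as a
    # global constant, so call sites write LOGGER.info(...) instead of
    # receiving a logger argument.
    LOGGER = Invidious::LogHandler.new(STDOUT, LogLevel::Info)

The hunks below then simply drop the `logger` parameter from signatures and call the constant directly.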


@@ -144,7 +144,7 @@ class ChannelRedirect < Exception
 end
 end
-def get_batch_channels(channels, db, logger, refresh = false, pull_all_videos = true, max_threads = 10)
+def get_batch_channels(channels, db, refresh = false, pull_all_videos = true, max_threads = 10)
 finished_channel = Channel(String | Nil).new
 spawn do
@@ -160,7 +160,7 @@ def get_batch_channels(channels, db, logger, refresh = false, pull_all_videos =
 active_threads += 1
 spawn do
 begin
-get_channel(ucid, db, logger, refresh, pull_all_videos)
+get_channel(ucid, db, refresh, pull_all_videos)
 finished_channel.send(ucid)
 rescue ex
 finished_channel.send(nil)
@@ -181,10 +181,10 @@ def get_batch_channels(channels, db, logger, refresh = false, pull_all_videos =
 return final
 end
-def get_channel(id, db, logger, refresh = true, pull_all_videos = true)
+def get_channel(id, db, refresh = true, pull_all_videos = true)
 if channel = db.query_one?("SELECT * FROM channels WHERE id = $1", id, as: InvidiousChannel)
 if refresh && Time.utc - channel.updated > 10.minutes
-channel = fetch_channel(id, db, logger, pull_all_videos: pull_all_videos)
+channel = fetch_channel(id, db, pull_all_videos: pull_all_videos)
 channel_array = channel.to_a
 args = arg_array(channel_array)
@@ -192,7 +192,7 @@ def get_channel(id, db, logger, refresh = true, pull_all_videos = true)
 ON CONFLICT (id) DO UPDATE SET author = $2, updated = $3", args: channel_array)
 end
 else
-channel = fetch_channel(id, db, logger, pull_all_videos: pull_all_videos)
+channel = fetch_channel(id, db, pull_all_videos: pull_all_videos)
 channel_array = channel.to_a
 args = arg_array(channel_array)
@@ -202,12 +202,12 @@ def get_channel(id, db, logger, refresh = true, pull_all_videos = true)
 return channel
 end
-def fetch_channel(ucid, db, logger, pull_all_videos = true, locale = nil)
-logger.trace("fetch_channel: #{ucid} : pull_all_videos = #{pull_all_videos}, locale = #{locale}")
+def fetch_channel(ucid, db, pull_all_videos = true, locale = nil)
+LOGGER.trace("fetch_channel: #{ucid} : pull_all_videos = #{pull_all_videos}, locale = #{locale}")
-logger.trace("fetch_channel: #{ucid} : Downloading RSS feed")
+LOGGER.trace("fetch_channel: #{ucid} : Downloading RSS feed")
 rss = YT_POOL.client &.get("/feeds/videos.xml?channel_id=#{ucid}").body
-logger.trace("fetch_channel: #{ucid} : Parsing RSS feed")
+LOGGER.trace("fetch_channel: #{ucid} : Parsing RSS feed")
 rss = XML.parse_html(rss)
 author = rss.xpath_node(%q(//feed/title))
@@ -223,11 +223,11 @@ def fetch_channel(ucid, db, logger, pull_all_videos = true, locale = nil)
 auto_generated = true
 end
-logger.trace("fetch_channel: #{ucid} : author = #{author}, auto_generated = #{auto_generated}")
+LOGGER.trace("fetch_channel: #{ucid} : author = #{author}, auto_generated = #{auto_generated}")
 page = 1
-logger.trace("fetch_channel: #{ucid} : Downloading channel videos page")
+LOGGER.trace("fetch_channel: #{ucid} : Downloading channel videos page")
 response = get_channel_videos_response(ucid, page, auto_generated: auto_generated)
 videos = [] of SearchVideo
@@ -235,7 +235,7 @@ def fetch_channel(ucid, db, logger, pull_all_videos = true, locale = nil)
 initial_data = JSON.parse(response.body).as_a.find &.["response"]?
 raise InfoException.new("Could not extract channel JSON") if !initial_data
-logger.trace("fetch_channel: #{ucid} : Extracting videos from channel videos page initial_data")
+LOGGER.trace("fetch_channel: #{ucid} : Extracting videos from channel videos page initial_data")
 videos = extract_videos(initial_data.as_h, author, ucid)
 rescue ex
 if response.body.includes?("To continue with your YouTube experience, please fill out the form below.") ||
@@ -245,7 +245,7 @@ def fetch_channel(ucid, db, logger, pull_all_videos = true, locale = nil)
 raise ex
 end
-logger.trace("fetch_channel: #{ucid} : Extracting videos from channel RSS feed")
+LOGGER.trace("fetch_channel: #{ucid} : Extracting videos from channel RSS feed")
 rss.xpath_nodes("//feed/entry").each do |entry|
 video_id = entry.xpath_node("videoid").not_nil!.content
 title = entry.xpath_node("title").not_nil!.content
@@ -279,7 +279,7 @@ def fetch_channel(ucid, db, logger, pull_all_videos = true, locale = nil)
 views: views,
 })
-logger.trace("fetch_channel: #{ucid} : video #{video_id} : Updating or inserting video")
+LOGGER.trace("fetch_channel: #{ucid} : video #{video_id} : Updating or inserting video")
 # We don't include the 'premiere_timestamp' here because channel pages don't include them,
 # meaning the above timestamp is always null
@@ -289,11 +289,11 @@ def fetch_channel(ucid, db, logger, pull_all_videos = true, locale = nil)
 live_now = $8, views = $10 returning (xmax=0) as was_insert", *video.to_tuple, as: Bool)
 if was_insert
-logger.trace("fetch_channel: #{ucid} : video #{video_id} : Inserted, updating subscriptions")
+LOGGER.trace("fetch_channel: #{ucid} : video #{video_id} : Inserted, updating subscriptions")
 db.exec("UPDATE users SET notifications = array_append(notifications, $1), \
 feed_needs_update = true WHERE $2 = ANY(subscriptions)", video.id, video.ucid)
 else
-logger.trace("fetch_channel: #{ucid} : video #{video_id} : Updated")
+LOGGER.trace("fetch_channel: #{ucid} : video #{video_id} : Updated")
 end
 end


@@ -336,11 +336,11 @@ def extract_items(initial_data : Hash(String, JSON::Any), author_fallback : Stri
 items
 end
-def check_enum(db, logger, enum_name, struct_type = nil)
+def check_enum(db, enum_name, struct_type = nil)
 return # TODO
 if !db.query_one?("SELECT true FROM pg_type WHERE typname = $1", enum_name, as: Bool)
-logger.info("check_enum: CREATE TYPE #{enum_name}")
+LOGGER.info("check_enum: CREATE TYPE #{enum_name}")
 db.using_connection do |conn|
 conn.as(PG::Connection).exec_all(File.read("config/sql/#{enum_name}.sql"))
@@ -348,12 +348,12 @@ def check_enum(db, logger, enum_name, struct_type = nil)
 end
 end
-def check_table(db, logger, table_name, struct_type = nil)
+def check_table(db, table_name, struct_type = nil)
 # Create table if it doesn't exist
 begin
 db.exec("SELECT * FROM #{table_name} LIMIT 0")
 rescue ex
-logger.info("check_table: check_table: CREATE TABLE #{table_name}")
+LOGGER.info("check_table: check_table: CREATE TABLE #{table_name}")
 db.using_connection do |conn|
 conn.as(PG::Connection).exec_all(File.read("config/sql/#{table_name}.sql"))
@@ -373,7 +373,7 @@ def check_table(db, logger, table_name, struct_type = nil)
 if name != column_array[i]?
 if !column_array[i]?
 new_column = column_types.select { |line| line.starts_with? name }[0]
-logger.info("check_table: ALTER TABLE #{table_name} ADD COLUMN #{new_column}")
+LOGGER.info("check_table: ALTER TABLE #{table_name} ADD COLUMN #{new_column}")
 db.exec("ALTER TABLE #{table_name} ADD COLUMN #{new_column}")
 next
 end
@@ -391,29 +391,29 @@ def check_table(db, logger, table_name, struct_type = nil)
 # There's a column we didn't expect
 if !new_column
-logger.info("check_table: ALTER TABLE #{table_name} DROP COLUMN #{column_array[i]}")
+LOGGER.info("check_table: ALTER TABLE #{table_name} DROP COLUMN #{column_array[i]}")
 db.exec("ALTER TABLE #{table_name} DROP COLUMN #{column_array[i]} CASCADE")
 column_array = get_column_array(db, table_name)
 next
 end
-logger.info("check_table: ALTER TABLE #{table_name} ADD COLUMN #{new_column}")
+LOGGER.info("check_table: ALTER TABLE #{table_name} ADD COLUMN #{new_column}")
 db.exec("ALTER TABLE #{table_name} ADD COLUMN #{new_column}")
-logger.info("check_table: UPDATE #{table_name} SET #{column_array[i]}_new=#{column_array[i]}")
+LOGGER.info("check_table: UPDATE #{table_name} SET #{column_array[i]}_new=#{column_array[i]}")
 db.exec("UPDATE #{table_name} SET #{column_array[i]}_new=#{column_array[i]}")
-logger.info("check_table: ALTER TABLE #{table_name} DROP COLUMN #{column_array[i]} CASCADE")
+LOGGER.info("check_table: ALTER TABLE #{table_name} DROP COLUMN #{column_array[i]} CASCADE")
 db.exec("ALTER TABLE #{table_name} DROP COLUMN #{column_array[i]} CASCADE")
-logger.info("check_table: ALTER TABLE #{table_name} RENAME COLUMN #{column_array[i]}_new TO #{column_array[i]}")
+LOGGER.info("check_table: ALTER TABLE #{table_name} RENAME COLUMN #{column_array[i]}_new TO #{column_array[i]}")
 db.exec("ALTER TABLE #{table_name} RENAME COLUMN #{column_array[i]}_new TO #{column_array[i]}")
 column_array = get_column_array(db, table_name)
 end
 else
-logger.info("check_table: ALTER TABLE #{table_name} DROP COLUMN #{column_array[i]} CASCADE")
+LOGGER.info("check_table: ALTER TABLE #{table_name} DROP COLUMN #{column_array[i]} CASCADE")
 db.exec("ALTER TABLE #{table_name} DROP COLUMN #{column_array[i]} CASCADE")
 end
 end
@@ -423,7 +423,7 @@ def check_table(db, logger, table_name, struct_type = nil)
 column_array.each do |column|
 if !struct_array.includes? column
-logger.info("check_table: ALTER TABLE #{table_name} DROP COLUMN #{column} CASCADE")
+LOGGER.info("check_table: ALTER TABLE #{table_name} DROP COLUMN #{column} CASCADE")
 db.exec("ALTER TABLE #{table_name} DROP COLUMN #{column} CASCADE")
 end
 end


@@ -1,8 +1,7 @@
 class Invidious::Jobs::BypassCaptchaJob < Invidious::Jobs::BaseJob
-private getter logger : Invidious::LogHandler
 private getter config : Config
-def initialize(@logger, @config)
+def initialize(@config)
 end
 def begin
@@ -127,7 +126,7 @@ class Invidious::Jobs::BypassCaptchaJob < Invidious::Jobs::BaseJob
 end
 end
 rescue ex
-logger.error("BypassCaptchaJob: #{ex.message}")
+LOGGER.error("BypassCaptchaJob: #{ex.message}")
 ensure
 sleep 1.minute
 Fiber.yield


@@ -1,9 +1,8 @@
 class Invidious::Jobs::RefreshChannelsJob < Invidious::Jobs::BaseJob
 private getter db : DB::Database
-private getter logger : Invidious::LogHandler
 private getter config : Config
-def initialize(@db, @logger, @config)
+def initialize(@db, @config)
 end
 def begin
@@ -14,37 +13,37 @@ class Invidious::Jobs::RefreshChannelsJob < Invidious::Jobs::BaseJob
 backoff = 1.seconds
 loop do
-logger.debug("RefreshChannelsJob: Refreshing all channels")
+LOGGER.debug("RefreshChannelsJob: Refreshing all channels")
 db.query("SELECT id FROM channels ORDER BY updated") do |rs|
 rs.each do
 id = rs.read(String)
 if active_fibers >= lim_fibers
-logger.trace("RefreshChannelsJob: Fiber limit reached, waiting...")
+LOGGER.trace("RefreshChannelsJob: Fiber limit reached, waiting...")
 if active_channel.receive
-logger.trace("RefreshChannelsJob: Fiber limit ok, continuing")
+LOGGER.trace("RefreshChannelsJob: Fiber limit ok, continuing")
 active_fibers -= 1
 end
 end
-logger.trace("RefreshChannelsJob: #{id} : Spawning fiber")
+LOGGER.trace("RefreshChannelsJob: #{id} : Spawning fiber")
 active_fibers += 1
 spawn do
 begin
-logger.trace("RefreshChannelsJob: #{id} fiber : Fetching channel")
-channel = fetch_channel(id, db, logger, config.full_refresh)
+LOGGER.trace("RefreshChannelsJob: #{id} fiber : Fetching channel")
+channel = fetch_channel(id, db, config.full_refresh)
 lim_fibers = max_fibers
-logger.trace("RefreshChannelsJob: #{id} fiber : Updating DB")
+LOGGER.trace("RefreshChannelsJob: #{id} fiber : Updating DB")
 db.exec("UPDATE channels SET updated = $1, author = $2, deleted = false WHERE id = $3", Time.utc, channel.author, id)
 rescue ex
-logger.error("RefreshChannelsJob: #{id} : #{ex.message}")
+LOGGER.error("RefreshChannelsJob: #{id} : #{ex.message}")
 if ex.message == "Deleted or invalid channel"
 db.exec("UPDATE channels SET updated = $1, deleted = true WHERE id = $2", Time.utc, id)
 else
 lim_fibers = 1
-logger.error("RefreshChannelsJob: #{id} fiber : backing off for #{backoff}s")
+LOGGER.error("RefreshChannelsJob: #{id} fiber : backing off for #{backoff}s")
 sleep backoff
 if backoff < 1.days
 backoff += backoff
@@ -53,14 +52,14 @@ class Invidious::Jobs::RefreshChannelsJob < Invidious::Jobs::BaseJob
 end
 end
 ensure
-logger.trace("RefreshChannelsJob: #{id} fiber : Done")
+LOGGER.trace("RefreshChannelsJob: #{id} fiber : Done")
 active_channel.send(true)
 end
 end
 end
 end
-logger.debug("RefreshChannelsJob: Done, sleeping for one minute")
+LOGGER.debug("RefreshChannelsJob: Done, sleeping for one minute")
 sleep 1.minute
 Fiber.yield
 end


@@ -1,9 +1,8 @@
 class Invidious::Jobs::RefreshFeedsJob < Invidious::Jobs::BaseJob
 private getter db : DB::Database
-private getter logger : Invidious::LogHandler
 private getter config : Config
-def initialize(@db, @logger, @config)
+def initialize(@db, @config)
 end
 def begin
@@ -30,14 +29,14 @@ class Invidious::Jobs::RefreshFeedsJob < Invidious::Jobs::BaseJob
 column_array = get_column_array(db, view_name)
 ChannelVideo.type_array.each_with_index do |name, i|
 if name != column_array[i]?
-logger.info("RefreshFeedsJob: DROP MATERIALIZED VIEW #{view_name}")
+LOGGER.info("RefreshFeedsJob: DROP MATERIALIZED VIEW #{view_name}")
 db.exec("DROP MATERIALIZED VIEW #{view_name}")
 raise "view does not exist"
 end
 end
 if !db.query_one("SELECT pg_get_viewdef('#{view_name}')", as: String).includes? "WHERE ((cv.ucid = ANY (u.subscriptions))"
-logger.info("RefreshFeedsJob: Materialized view #{view_name} is out-of-date, recreating...")
+LOGGER.info("RefreshFeedsJob: Materialized view #{view_name} is out-of-date, recreating...")
 db.exec("DROP MATERIALIZED VIEW #{view_name}")
 end
@@ -49,18 +48,18 @@ class Invidious::Jobs::RefreshFeedsJob < Invidious::Jobs::BaseJob
 legacy_view_name = "subscriptions_#{sha256(email)[0..7]}"
 db.exec("SELECT * FROM #{legacy_view_name} LIMIT 0")
-logger.info("RefreshFeedsJob: RENAME MATERIALIZED VIEW #{legacy_view_name}")
+LOGGER.info("RefreshFeedsJob: RENAME MATERIALIZED VIEW #{legacy_view_name}")
 db.exec("ALTER MATERIALIZED VIEW #{legacy_view_name} RENAME TO #{view_name}")
 rescue ex
 begin
 # While iterating through, we may have an email stored from a deleted account
 if db.query_one?("SELECT true FROM users WHERE email = $1", email, as: Bool)
-logger.info("RefreshFeedsJob: CREATE #{view_name}")
+LOGGER.info("RefreshFeedsJob: CREATE #{view_name}")
 db.exec("CREATE MATERIALIZED VIEW #{view_name} AS #{MATERIALIZED_VIEW_SQL.call(email)}")
 db.exec("UPDATE users SET feed_needs_update = false WHERE email = $1", email)
 end
 rescue ex
-logger.error("RefreshFeedJobs: REFRESH #{email} : #{ex.message}")
+LOGGER.error("RefreshFeedJobs: REFRESH #{email} : #{ex.message}")
 end
 end
 end


@@ -1,10 +1,9 @@
 class Invidious::Jobs::SubscribeToFeedsJob < Invidious::Jobs::BaseJob
 private getter db : DB::Database
-private getter logger : Invidious::LogHandler
 private getter hmac_key : String
 private getter config : Config
-def initialize(@db, @logger, @config, @hmac_key)
+def initialize(@db, @config, @hmac_key)
 end
 def begin
@@ -34,10 +33,10 @@ class Invidious::Jobs::SubscribeToFeedsJob < Invidious::Jobs::BaseJob
 response = subscribe_pubsub(ucid, hmac_key, config)
 if response.status_code >= 400
-logger.error("SubscribeToFeedsJob: #{ucid} : #{response.body}")
+LOGGER.error("SubscribeToFeedsJob: #{ucid} : #{response.body}")
 end
 rescue ex
-logger.error("SubscribeToFeedsJob: #{ucid} : #{ex.message}")
+LOGGER.error("SubscribeToFeedsJob: #{ucid} : #{ex.message}")
 end
 active_channel.send(true)


@@ -1,15 +1,10 @@
 class Invidious::Jobs::UpdateDecryptFunctionJob < Invidious::Jobs::BaseJob
-private getter logger : Invidious::LogHandler
-def initialize(@logger)
-end
 def begin
 loop do
 begin
 DECRYPT_FUNCTION.update_decrypt_function
 rescue ex
-logger.error("UpdateDecryptFunctionJob : #{ex.message}")
+LOGGER.error("UpdateDecryptFunctionJob : #{ex.message}")
 ensure
 sleep 1.minute
 Fiber.yield


@@ -1,7 +1,6 @@
 abstract class Invidious::Routes::BaseRoute
 private getter config : Config
-private getter logger : Invidious::LogHandler
-def initialize(@config, @logger)
+def initialize(@config)
 end
 end


@@ -267,7 +267,7 @@ class Invidious::Routes::Login < Invidious::Routes::BaseRoute
 raise "Couldn't get SID."
 end
-user, sid = get_user(sid, headers, PG_DB, logger)
+user, sid = get_user(sid, headers, PG_DB)
 # We are now logged in
 traceback << "done.<br/>"


@@ -62,7 +62,7 @@ class Invidious::Routes::Watch < Invidious::Routes::BaseRoute
 rescue ex : VideoRedirect
 return env.redirect env.request.resource.gsub(id, ex.video_id)
 rescue ex
-logger.error("get_video: #{id} : #{ex.message}")
+LOGGER.error("get_video: #{id} : #{ex.message}")
 return error_template(500, ex)
 end


@@ -1,14 +1,14 @@
 module Invidious::Routing
 macro get(path, controller, method = :handle)
 get {{ path }} do |env|
-controller_instance = {{ controller }}.new(config, logger)
+controller_instance = {{ controller }}.new(config)
 controller_instance.{{ method.id }}(env)
 end
 end
 macro post(path, controller, method = :handle)
 post {{ path }} do |env|
-controller_instance = {{ controller }}.new(config, logger)
+controller_instance = {{ controller }}.new(config)
 controller_instance.{{ method.id }}(env)
 end
 end


@@ -269,12 +269,12 @@ struct Preferences
 end
 end
-def get_user(sid, headers, db, logger, refresh = true)
+def get_user(sid, headers, db, refresh = true)
 if email = db.query_one?("SELECT email FROM session_ids WHERE id = $1", sid, as: String)
 user = db.query_one("SELECT * FROM users WHERE email = $1", email, as: User)
 if refresh && Time.utc - user.updated > 1.minute
-user, sid = fetch_user(sid, headers, db, logger)
+user, sid = fetch_user(sid, headers, db)
 user_array = user.to_a
 user_array[4] = user_array[4].to_json # User preferences
 args = arg_array(user_array)
@@ -292,7 +292,7 @@ def get_user(sid, headers, db, logger, refresh = true)
 end
 end
 else
-user, sid = fetch_user(sid, headers, db, logger)
+user, sid = fetch_user(sid, headers, db)
 user_array = user.to_a
 user_array[4] = user_array[4].to_json # User preferences
 args = arg_array(user.to_a)
@@ -313,7 +313,7 @@ def get_user(sid, headers, db, logger, refresh = true)
 return user, sid
 end
-def fetch_user(sid, headers, db, logger)
+def fetch_user(sid, headers, db)
 feed = YT_POOL.client &.get("/subscription_manager?disable_polymer=1", headers)
 feed = XML.parse_html(feed.body)
@@ -326,7 +326,7 @@ def fetch_user(sid, headers, db, logger)
 end
 end
-channels = get_batch_channels(channels, db, logger, false, false)
+channels = get_batch_channels(channels, db, false, false)
 email = feed.xpath_node(%q(//a[@class="yt-masthead-picker-header yt-masthead-picker-active-account"]))
 if email