From 7a8620a570f1004f7db3f018f68fd0364f77bef6 Mon Sep 17 00:00:00 2001
From: saltycrys <73420320+saltycrys@users.noreply.github.com>
Date: Mon, 4 Jan 2021 16:05:15 +0100
Subject: [PATCH 1/3] Add CLI arguments to config file

The log level can now be set with `log_level` (accepts ints and strings).
The log file can now be set with `output` (also accepts `STDOUT`).
---
 src/invidious.cr                 | 20 ++++++++++++--------
 src/invidious/helpers/helpers.cr |  8 +++++---
 src/invidious/helpers/logger.cr  | 16 ++++++++--------
 3 files changed, 25 insertions(+), 19 deletions(-)

diff --git a/src/invidious.cr b/src/invidious.cr
index 5d19acf1..e32cb896 100644
--- a/src/invidious.cr
+++ b/src/invidious.cr
@@ -107,8 +107,6 @@ LOCALES = {
 YT_POOL = QUICPool.new(YT_URL, capacity: CONFIG.pool_size, timeout: 2.0)
 
 config = CONFIG
-output = STDOUT
-loglvl = LogLevel::Debug
 
 Kemal.config.extra_options do |parser|
   parser.banner = "Usage: invidious [arguments]"
@@ -128,12 +126,11 @@ Kemal.config.extra_options do |parser|
       exit
     end
   end
-  parser.on("-o OUTPUT", "--output=OUTPUT", "Redirect output (default: STDOUT)") do |output_arg|
-    FileUtils.mkdir_p(File.dirname(output_arg))
-    output = File.open(output_arg, mode: "a")
+  parser.on("-o OUTPUT", "--output=OUTPUT", "Redirect output (default: #{config.output})") do |output|
+    config.output = output
   end
-  parser.on("-l LEVEL", "--log-level=LEVEL", "Log level, one of #{LogLevel.values} (default: #{loglvl})") do |loglvl_arg|
-    loglvl = LogLevel.parse(loglvl_arg)
+  parser.on("-l LEVEL", "--log-level=LEVEL", "Log level, one of #{LogLevel.values} (default: #{config.log_level})") do |log_level|
+    config.log_level = LogLevel.parse(log_level)
   end
   parser.on("-v", "--version", "Print version") do
     puts SOFTWARE.to_pretty_json
@@ -143,7 +140,14 @@ end
 
 Kemal::CLI.new ARGV
 
-logger = Invidious::LogHandler.new(output, loglvl)
+if config.output.upcase == "STDOUT"
+  output = STDOUT
+else
+  FileUtils.mkdir_p(File.dirname(config.output))
+  output = File.open(config.output, mode: "a")
+end
+
+logger = Invidious::LogHandler.new(output, config.log_level)
 
 # Check table integrity
 if CONFIG.check_tables
diff --git a/src/invidious/helpers/helpers.cr b/src/invidious/helpers/helpers.cr
index a6651a31..1866f8e5 100644
--- a/src/invidious/helpers/helpers.cr
+++ b/src/invidious/helpers/helpers.cr
@@ -64,11 +64,13 @@ end
 class Config
   include YAML::Serializable
 
-  property channel_threads : Int32                 # Number of threads to use for crawling videos from channels (for updating subscriptions)
-  property feed_threads : Int32                    # Number of threads to use for updating feeds
+  property channel_threads : Int32 = 1             # Number of threads to use for crawling videos from channels (for updating subscriptions)
+  property feed_threads : Int32 = 1                # Number of threads to use for updating feeds
+  property output : String = "STDOUT"              # Log file path or STDOUT
+  property log_level : LogLevel = LogLevel::Debug  # Default log level, valid YAML values are ints and strings, see src/invidious/helpers/logger.cr
   property db : DBConfig                           # Database configuration
   property decrypt_polling : Bool = true           # Use polling to keep decryption function up to date
-  property full_refresh : Bool                     # Used for crawling channels: threads should check all videos uploaded by a channel
+  property full_refresh : Bool = false             # Used for crawling channels: threads should check all videos uploaded by a channel
   property https_only : Bool?                      # Used to tell Invidious it is behind a proxy, so links to resources should be https://
   property hmac_key : String?                      # HMAC signing key for CSRF tokens and verifying pubsub subscriptions
   property domain : String?                        # Domain to be used for links to resources on the site where an absolute URL is required
diff --git a/src/invidious/helpers/logger.cr b/src/invidious/helpers/logger.cr
index 4e4d7306..7c5b0247 100644
--- a/src/invidious/helpers/logger.cr
+++ b/src/invidious/helpers/logger.cr
@@ -1,14 +1,14 @@
 require "logger"
 
 enum LogLevel
-  All
-  Trace
-  Debug
-  Info
-  Warn
-  Error
-  Fatal
-  Off
+  All   = 0
+  Trace = 1
+  Debug = 2
+  Info  = 3
+  Warn  = 4
+  Error = 5
+  Fatal = 6
+  Off   = 7
 end
 
 class Invidious::LogHandler < Kemal::BaseLogHandler

From 6365ee7487d86d94f385bfab72a2cc3260cb6690 Mon Sep 17 00:00:00 2001
From: saltycrys <73420320+saltycrys@users.noreply.github.com>
Date: Mon, 4 Jan 2021 16:51:06 +0100
Subject: [PATCH 2/3] Make logger a constant

Instead of passing around `logger`, there is now the global constant `LOGGER`.
---
 src/invidious.cr                              | 84 +++++++++----------
 src/invidious/channels.cr                     | 32 +++----
 src/invidious/helpers/helpers.cr              | 24 +++---
 src/invidious/jobs/bypass_captcha_job.cr      |  5 +-
 src/invidious/jobs/refresh_channels_job.cr    | 25 +++---
 src/invidious/jobs/refresh_feeds_job.cr       | 13 ++-
 src/invidious/jobs/subscribe_to_feeds_job.cr  |  7 +-
 .../jobs/update_decrypt_function_job.cr       |  7 +-
 src/invidious/routes/base_route.cr            |  3 +-
 src/invidious/routes/login.cr                 |  2 +-
 src/invidious/routes/watch.cr                 |  2 +-
 src/invidious/routing.cr                      |  4 +-
 src/invidious/users.cr                        | 10 +--
 13 files changed, 103 insertions(+), 115 deletions(-)

diff --git a/src/invidious.cr b/src/invidious.cr
index e32cb896..2b915350 100644
--- a/src/invidious.cr
+++ b/src/invidious.cr
@@ -106,31 +106,30 @@ LOCALES = {
 
 YT_POOL = QUICPool.new(YT_URL, capacity: CONFIG.pool_size, timeout: 2.0)
 
-config = CONFIG
-
+# CLI
 Kemal.config.extra_options do |parser|
   parser.banner = "Usage: invidious [arguments]"
-  parser.on("-c THREADS", "--channel-threads=THREADS", "Number of threads for refreshing channels (default: #{config.channel_threads})") do |number|
+  parser.on("-c THREADS", "--channel-threads=THREADS", "Number of threads for refreshing channels (default: #{CONFIG.channel_threads})") do |number|
     begin
-      config.channel_threads = number.to_i
+      CONFIG.channel_threads = number.to_i
     rescue ex
       puts "THREADS must be integer"
       exit
     end
   end
-  parser.on("-f THREADS", "--feed-threads=THREADS", "Number of threads for refreshing feeds (default: #{config.feed_threads})") do |number|
+  parser.on("-f THREADS", "--feed-threads=THREADS", "Number of threads for refreshing feeds (default: #{CONFIG.feed_threads})") do |number|
     begin
-      config.feed_threads = number.to_i
+      CONFIG.feed_threads = number.to_i
     rescue ex
       puts "THREADS must be integer"
       exit
     end
   end
-  parser.on("-o OUTPUT", "--output=OUTPUT", "Redirect output (default: #{config.output})") do |output|
-    config.output = output
+  parser.on("-o OUTPUT", "--output=OUTPUT", "Redirect output (default: #{CONFIG.output})") do |output|
+    CONFIG.output = output
   end
-  parser.on("-l LEVEL", "--log-level=LEVEL", "Log level, one of #{LogLevel.values} (default: #{config.log_level})") do |log_level|
-    config.log_level = LogLevel.parse(log_level)
+  parser.on("-l LEVEL", "--log-level=LEVEL", "Log level, one of #{LogLevel.values} (default: #{CONFIG.log_level})") do |log_level|
+    CONFIG.log_level = LogLevel.parse(log_level)
   end
   parser.on("-v", "--version", "Print version") do
     puts SOFTWARE.to_pretty_json
@@ -140,42 +139,41 @@ end
 
 Kemal::CLI.new ARGV
 
-if config.output.upcase == "STDOUT"
-  output = STDOUT
-else
-  FileUtils.mkdir_p(File.dirname(config.output))
-  output = File.open(config.output, mode: "a")
+if CONFIG.output.upcase != "STDOUT"
+  FileUtils.mkdir_p(File.dirname(CONFIG.output))
 end
+OUTPUT = CONFIG.output.upcase == "STDOUT" ? STDOUT : File.open(CONFIG.output, mode: "a")
+LOGGER = Invidious::LogHandler.new(OUTPUT, CONFIG.log_level)
 
-logger = Invidious::LogHandler.new(output, config.log_level)
+config = CONFIG
 
 # Check table integrity
 if CONFIG.check_tables
-  check_enum(PG_DB, logger, "privacy", PlaylistPrivacy)
+  check_enum(PG_DB, "privacy", PlaylistPrivacy)
 
-  check_table(PG_DB, logger, "channels", InvidiousChannel)
-  check_table(PG_DB, logger, "channel_videos", ChannelVideo)
-  check_table(PG_DB, logger, "playlists", InvidiousPlaylist)
-  check_table(PG_DB, logger, "playlist_videos", PlaylistVideo)
-  check_table(PG_DB, logger, "nonces", Nonce)
-  check_table(PG_DB, logger, "session_ids", SessionId)
-  check_table(PG_DB, logger, "users", User)
-  check_table(PG_DB, logger, "videos", Video)
+  check_table(PG_DB, "channels", InvidiousChannel)
+  check_table(PG_DB, "channel_videos", ChannelVideo)
+  check_table(PG_DB, "playlists", InvidiousPlaylist)
+  check_table(PG_DB, "playlist_videos", PlaylistVideo)
+  check_table(PG_DB, "nonces", Nonce)
+  check_table(PG_DB, "session_ids", SessionId)
+  check_table(PG_DB, "users", User)
+  check_table(PG_DB, "videos", Video)
 
   if CONFIG.cache_annotations
-    check_table(PG_DB, logger, "annotations", Annotation)
+    check_table(PG_DB, "annotations", Annotation)
   end
 end
 
 # Start jobs
 
-Invidious::Jobs.register Invidious::Jobs::RefreshChannelsJob.new(PG_DB, logger, config)
-Invidious::Jobs.register Invidious::Jobs::RefreshFeedsJob.new(PG_DB, logger, config)
-Invidious::Jobs.register Invidious::Jobs::SubscribeToFeedsJob.new(PG_DB, logger, config, HMAC_KEY)
+Invidious::Jobs.register Invidious::Jobs::RefreshChannelsJob.new(PG_DB, config)
+Invidious::Jobs.register Invidious::Jobs::RefreshFeedsJob.new(PG_DB, config)
+Invidious::Jobs.register Invidious::Jobs::SubscribeToFeedsJob.new(PG_DB, config, HMAC_KEY)
 
 DECRYPT_FUNCTION = DecryptFunction.new(CONFIG.decrypt_polling)
 if config.decrypt_polling
-  Invidious::Jobs.register Invidious::Jobs::UpdateDecryptFunctionJob.new(logger)
+  Invidious::Jobs.register Invidious::Jobs::UpdateDecryptFunctionJob.new
 end
 
 if config.statistics_enabled
@@ -187,7 +185,7 @@ if config.popular_enabled
 end
 
 if config.captcha_key
-  Invidious::Jobs.register Invidious::Jobs::BypassCaptchaJob.new(logger, config)
+  Invidious::Jobs.register Invidious::Jobs::BypassCaptchaJob.new(config)
 end
 
 connection_channel = Channel({Bool, Channel(PQ::Notification)}).new(32)
@@ -265,7 +263,7 @@ before_all do |env|
       headers["Cookie"] = env.request.headers["Cookie"]
 
       begin
-        user, sid = get_user(sid, headers, PG_DB, logger, false)
+        user, sid = get_user(sid, headers, PG_DB, false)
         csrf_token = generate_response(sid, {
           ":authorize_token",
           ":playlist_ajax",
@@ -535,7 +533,7 @@ post "/subscription_ajax" do |env|
   case action
   when "action_create_subscription_to_channel"
     if !user.subscriptions.includes? channel_id
-      get_channel(channel_id, PG_DB, logger, false, false)
+      get_channel(channel_id, PG_DB, false, false)
       PG_DB.exec("UPDATE users SET feed_needs_update = true, subscriptions = array_append(subscriptions, $1) WHERE email = $2", channel_id, email)
     end
   when "action_remove_subscriptions"
@@ -570,7 +568,7 @@ get "/subscription_manager" do |env|
     headers = HTTP::Headers.new
     headers["Cookie"] = env.request.headers["Cookie"]
 
-    user, sid = get_user(sid, headers, PG_DB, logger)
+    user, sid = get_user(sid, headers, PG_DB)
   end
 
   action_takeout = env.params.query["action_takeout"]?.try &.to_i?
@@ -694,7 +692,7 @@ post "/data_control" do |env|
           user.subscriptions += body["subscriptions"].as_a.map { |a| a.as_s }
           user.subscriptions.uniq!
 
-          user.subscriptions = get_batch_channels(user.subscriptions, PG_DB, logger, false, false)
+          user.subscriptions = get_batch_channels(user.subscriptions, PG_DB, false, false)
 
           PG_DB.exec("UPDATE users SET feed_needs_update = true, subscriptions = $1 WHERE email = $2", user.subscriptions, user.email)
         end
@@ -763,7 +761,7 @@ post "/data_control" do |env|
         end
         user.subscriptions.uniq!
 
-        user.subscriptions = get_batch_channels(user.subscriptions, PG_DB, logger, false, false)
+        user.subscriptions = get_batch_channels(user.subscriptions, PG_DB, false, false)
 
         PG_DB.exec("UPDATE users SET feed_needs_update = true, subscriptions = $1 WHERE email = $2", user.subscriptions, user.email)
       when "import_freetube"
@@ -772,7 +770,7 @@ post "/data_control" do |env|
         end
         user.subscriptions.uniq!
 
-        user.subscriptions = get_batch_channels(user.subscriptions, PG_DB, logger, false, false)
+        user.subscriptions = get_batch_channels(user.subscriptions, PG_DB, false, false)
 
         PG_DB.exec("UPDATE users SET feed_needs_update = true, subscriptions = $1 WHERE email = $2", user.subscriptions, user.email)
       when "import_newpipe_subscriptions"
@@ -791,7 +789,7 @@ post "/data_control" do |env|
         end
         user.subscriptions.uniq!
 
-        user.subscriptions = get_batch_channels(user.subscriptions, PG_DB, logger, false, false)
+        user.subscriptions = get_batch_channels(user.subscriptions, PG_DB, false, false)
 
         PG_DB.exec("UPDATE users SET feed_needs_update = true, subscriptions = $1 WHERE email = $2", user.subscriptions, user.email)
       when "import_newpipe"
@@ -810,7 +808,7 @@ post "/data_control" do |env|
               user.subscriptions += db.query_all("SELECT url FROM subscriptions", as: String).map { |url| url.lchop("https://www.youtube.com/channel/") }
               user.subscriptions.uniq!
 
-              user.subscriptions = get_batch_channels(user.subscriptions, PG_DB, logger, false, false)
+              user.subscriptions = get_batch_channels(user.subscriptions, PG_DB, false, false)
 
               PG_DB.exec("UPDATE users SET feed_needs_update = true, subscriptions = $1 WHERE email = $2", user.subscriptions, user.email)
 
@@ -1213,7 +1211,7 @@ get "/feed/subscriptions" do |env|
   headers["Cookie"] = env.request.headers["Cookie"]
 
   if !user.password
-    user, sid = get_user(sid, headers, PG_DB, logger)
+    user, sid = get_user(sid, headers, PG_DB)
   end
 
   max_results = env.params.query["max_results"]?.try &.to_i?.try &.clamp(0, MAX_ITEMS_PER_PAGE)
@@ -1517,7 +1515,7 @@ post "/feed/webhook/:token" do |env|
   signature = env.request.headers["X-Hub-Signature"].lchop("sha1=")
 
   if signature != OpenSSL::HMAC.hexdigest(:sha1, HMAC_KEY, body)
-    logger.error("/feed/webhook/#{token} : Invalid signature")
+    LOGGER.error("/feed/webhook/#{token} : Invalid signature")
     env.response.status_code = 200
     next
   end
@@ -2835,7 +2833,7 @@ post "/api/v1/auth/subscriptions/:ucid" do |env|
   ucid = env.params.url["ucid"]
 
   if !user.subscriptions.includes? ucid
-    get_channel(ucid, PG_DB, logger, false, false)
+    get_channel(ucid, PG_DB, false, false)
     PG_DB.exec("UPDATE users SET feed_needs_update = true, subscriptions = array_append(subscriptions,$1) WHERE email = $2", ucid, user.email)
   end
 
@@ -3929,7 +3927,7 @@ add_context_storage_type(Array(String))
 add_context_storage_type(Preferences)
 add_context_storage_type(User)
 
-Kemal.config.logger = logger
+Kemal.config.logger = LOGGER
 Kemal.config.host_binding = Kemal.config.host_binding != "0.0.0.0" ? Kemal.config.host_binding : CONFIG.host_binding
 Kemal.config.port = Kemal.config.port != 3000 ? Kemal.config.port : CONFIG.port
 Kemal.run
diff --git a/src/invidious/channels.cr b/src/invidious/channels.cr
index 6907ff3d..274e86f9 100644
--- a/src/invidious/channels.cr
+++ b/src/invidious/channels.cr
@@ -144,7 +144,7 @@ class ChannelRedirect < Exception
   end
 end
 
-def get_batch_channels(channels, db, logger, refresh = false, pull_all_videos = true, max_threads = 10)
+def get_batch_channels(channels, db, refresh = false, pull_all_videos = true, max_threads = 10)
   finished_channel = Channel(String | Nil).new
 
   spawn do
@@ -160,7 +160,7 @@ def get_batch_channels(channels, db, logger, refresh = false, pull_all_videos =
       active_threads += 1
       spawn do
         begin
-          get_channel(ucid, db, logger, refresh, pull_all_videos)
+          get_channel(ucid, db, refresh, pull_all_videos)
           finished_channel.send(ucid)
         rescue ex
           finished_channel.send(nil)
@@ -181,10 +181,10 @@ def get_batch_channels(channels, db, logger, refresh = false, pull_all_videos =
   return final
 end
 
-def get_channel(id, db, logger, refresh = true, pull_all_videos = true)
+def get_channel(id, db, refresh = true, pull_all_videos = true)
   if channel = db.query_one?("SELECT * FROM channels WHERE id = $1", id, as: InvidiousChannel)
     if refresh && Time.utc - channel.updated > 10.minutes
-      channel = fetch_channel(id, db, logger, pull_all_videos: pull_all_videos)
+      channel = fetch_channel(id, db, pull_all_videos: pull_all_videos)
       channel_array = channel.to_a
       args = arg_array(channel_array)
 
@@ -192,7 +192,7 @@ def get_channel(id, db, logger, refresh = true, pull_all_videos = true)
         ON CONFLICT (id) DO UPDATE SET author = $2, updated = $3", args: channel_array)
     end
   else
-    channel = fetch_channel(id, db, logger, pull_all_videos: pull_all_videos)
+    channel = fetch_channel(id, db, pull_all_videos: pull_all_videos)
     channel_array = channel.to_a
     args = arg_array(channel_array)
 
@@ -202,12 +202,12 @@ def get_channel(id, db, logger, refresh = true, pull_all_videos = true)
   return channel
 end
 
-def fetch_channel(ucid, db, logger, pull_all_videos = true, locale = nil)
-  logger.trace("fetch_channel: #{ucid} : pull_all_videos = #{pull_all_videos}, locale = #{locale}")
+def fetch_channel(ucid, db, pull_all_videos = true, locale = nil)
+  LOGGER.trace("fetch_channel: #{ucid} : pull_all_videos = #{pull_all_videos}, locale = #{locale}")
 
-  logger.trace("fetch_channel: #{ucid} : Downloading RSS feed")
+  LOGGER.trace("fetch_channel: #{ucid} : Downloading RSS feed")
   rss = YT_POOL.client &.get("/feeds/videos.xml?channel_id=#{ucid}").body
-  logger.trace("fetch_channel: #{ucid} : Parsing RSS feed")
+  LOGGER.trace("fetch_channel: #{ucid} : Parsing RSS feed")
   rss = XML.parse_html(rss)
 
   author = rss.xpath_node(%q(//feed/title))
@@ -223,11 +223,11 @@ def fetch_channel(ucid, db, logger, pull_all_videos = true, locale = nil)
     auto_generated = true
   end
 
-  logger.trace("fetch_channel: #{ucid} : author = #{author}, auto_generated = #{auto_generated}")
+  LOGGER.trace("fetch_channel: #{ucid} : author = #{author}, auto_generated = #{auto_generated}")
 
   page = 1
 
-  logger.trace("fetch_channel: #{ucid} : Downloading channel videos page")
+  LOGGER.trace("fetch_channel: #{ucid} : Downloading channel videos page")
   response = get_channel_videos_response(ucid, page, auto_generated: auto_generated)
 
   videos = [] of SearchVideo
@@ -235,7 +235,7 @@ def fetch_channel(ucid, db, logger, pull_all_videos = true, locale = nil)
     initial_data = JSON.parse(response.body).as_a.find &.["response"]?
     raise InfoException.new("Could not extract channel JSON") if !initial_data
 
-    logger.trace("fetch_channel: #{ucid} : Extracting videos from channel videos page initial_data")
+    LOGGER.trace("fetch_channel: #{ucid} : Extracting videos from channel videos page initial_data")
     videos = extract_videos(initial_data.as_h, author, ucid)
   rescue ex
     if response.body.includes?("To continue with your YouTube experience, please fill out the form below.") ||
@@ -245,7 +245,7 @@ def fetch_channel(ucid, db, logger, pull_all_videos = true, locale = nil)
     raise ex
   end
 
-  logger.trace("fetch_channel: #{ucid} : Extracting videos from channel RSS feed")
+  LOGGER.trace("fetch_channel: #{ucid} : Extracting videos from channel RSS feed")
   rss.xpath_nodes("//feed/entry").each do |entry|
     video_id = entry.xpath_node("videoid").not_nil!.content
     title = entry.xpath_node("title").not_nil!.content
@@ -279,7 +279,7 @@ def fetch_channel(ucid, db, logger, pull_all_videos = true, locale = nil)
       views:              views,
     })
 
-    logger.trace("fetch_channel: #{ucid} : video #{video_id} : Updating or inserting video")
+    LOGGER.trace("fetch_channel: #{ucid} : video #{video_id} : Updating or inserting video")
 
     # We don't include the 'premiere_timestamp' here because channel pages don't include them,
     # meaning the above timestamp is always null
@@ -289,11 +289,11 @@ def fetch_channel(ucid, db, logger, pull_all_videos = true, locale = nil)
       live_now = $8, views = $10 returning (xmax=0) as was_insert", *video.to_tuple, as: Bool)
 
     if was_insert
-      logger.trace("fetch_channel: #{ucid} : video #{video_id} : Inserted, updating subscriptions")
+      LOGGER.trace("fetch_channel: #{ucid} : video #{video_id} : Inserted, updating subscriptions")
       db.exec("UPDATE users SET notifications = array_append(notifications, $1), \
         feed_needs_update = true WHERE $2 = ANY(subscriptions)", video.id, video.ucid)
     else
-      logger.trace("fetch_channel: #{ucid} : video #{video_id} : Updated")
+      LOGGER.trace("fetch_channel: #{ucid} : video #{video_id} : Updated")
     end
   end
 
diff --git a/src/invidious/helpers/helpers.cr b/src/invidious/helpers/helpers.cr
index 1866f8e5..cf965dad 100644
--- a/src/invidious/helpers/helpers.cr
+++ b/src/invidious/helpers/helpers.cr
@@ -336,11 +336,11 @@ def extract_items(initial_data : Hash(String, JSON::Any), author_fallback : Stri
   items
 end
 
-def check_enum(db, logger, enum_name, struct_type = nil)
+def check_enum(db, enum_name, struct_type = nil)
   return # TODO
 
   if !db.query_one?("SELECT true FROM pg_type WHERE typname = $1", enum_name, as: Bool)
-    logger.info("check_enum: CREATE TYPE #{enum_name}")
+    LOGGER.info("check_enum: CREATE TYPE #{enum_name}")
 
     db.using_connection do |conn|
       conn.as(PG::Connection).exec_all(File.read("config/sql/#{enum_name}.sql"))
@@ -348,12 +348,12 @@ def check_enum(db, logger, enum_name, struct_type = nil)
   end
 end
 
-def check_table(db, logger, table_name, struct_type = nil)
+def check_table(db, table_name, struct_type = nil)
   # Create table if it doesn't exist
   begin
     db.exec("SELECT * FROM #{table_name} LIMIT 0")
   rescue ex
-    logger.info("check_table: check_table: CREATE TABLE #{table_name}")
+    LOGGER.info("check_table: check_table: CREATE TABLE #{table_name}")
 
     db.using_connection do |conn|
       conn.as(PG::Connection).exec_all(File.read("config/sql/#{table_name}.sql"))
@@ -373,7 +373,7 @@ def check_table(db, logger, table_name, struct_type = nil)
     if name != column_array[i]?
       if !column_array[i]?
         new_column = column_types.select { |line| line.starts_with? name }[0]
-        logger.info("check_table: ALTER TABLE #{table_name} ADD COLUMN #{new_column}")
+        LOGGER.info("check_table: ALTER TABLE #{table_name} ADD COLUMN #{new_column}")
         db.exec("ALTER TABLE #{table_name} ADD COLUMN #{new_column}")
         next
       end
@@ -391,29 +391,29 @@ def check_table(db, logger, table_name, struct_type = nil)
 
           # There's a column we didn't expect
           if !new_column
-            logger.info("check_table: ALTER TABLE #{table_name} DROP COLUMN #{column_array[i]}")
+            LOGGER.info("check_table: ALTER TABLE #{table_name} DROP COLUMN #{column_array[i]}")
             db.exec("ALTER TABLE #{table_name} DROP COLUMN #{column_array[i]} CASCADE")
 
             column_array = get_column_array(db, table_name)
             next
           end
 
-          logger.info("check_table: ALTER TABLE #{table_name} ADD COLUMN #{new_column}")
+          LOGGER.info("check_table: ALTER TABLE #{table_name} ADD COLUMN #{new_column}")
           db.exec("ALTER TABLE #{table_name} ADD COLUMN #{new_column}")
 
-          logger.info("check_table: UPDATE #{table_name} SET #{column_array[i]}_new=#{column_array[i]}")
+          LOGGER.info("check_table: UPDATE #{table_name} SET #{column_array[i]}_new=#{column_array[i]}")
           db.exec("UPDATE #{table_name} SET #{column_array[i]}_new=#{column_array[i]}")
 
-          logger.info("check_table: ALTER TABLE #{table_name} DROP COLUMN #{column_array[i]} CASCADE")
+          LOGGER.info("check_table: ALTER TABLE #{table_name} DROP COLUMN #{column_array[i]} CASCADE")
           db.exec("ALTER TABLE #{table_name} DROP COLUMN #{column_array[i]} CASCADE")
 
-          logger.info("check_table: ALTER TABLE #{table_name} RENAME COLUMN #{column_array[i]}_new TO #{column_array[i]}")
+          LOGGER.info("check_table: ALTER TABLE #{table_name} RENAME COLUMN #{column_array[i]}_new TO #{column_array[i]}")
           db.exec("ALTER TABLE #{table_name} RENAME COLUMN #{column_array[i]}_new TO #{column_array[i]}")
 
           column_array = get_column_array(db, table_name)
         end
       else
-        logger.info("check_table: ALTER TABLE #{table_name} DROP COLUMN #{column_array[i]} CASCADE")
+        LOGGER.info("check_table: ALTER TABLE #{table_name} DROP COLUMN #{column_array[i]} CASCADE")
         db.exec("ALTER TABLE #{table_name} DROP COLUMN #{column_array[i]} CASCADE")
       end
     end
@@ -423,7 +423,7 @@ def check_table(db, logger, table_name, struct_type = nil)
 
   column_array.each do |column|
     if !struct_array.includes? column
-      logger.info("check_table: ALTER TABLE #{table_name} DROP COLUMN #{column} CASCADE")
+      LOGGER.info("check_table: ALTER TABLE #{table_name} DROP COLUMN #{column} CASCADE")
       db.exec("ALTER TABLE #{table_name} DROP COLUMN #{column} CASCADE")
     end
   end
diff --git a/src/invidious/jobs/bypass_captcha_job.cr b/src/invidious/jobs/bypass_captcha_job.cr
index 61f8eaf3..22c54036 100644
--- a/src/invidious/jobs/bypass_captcha_job.cr
+++ b/src/invidious/jobs/bypass_captcha_job.cr
@@ -1,8 +1,7 @@
 class Invidious::Jobs::BypassCaptchaJob < Invidious::Jobs::BaseJob
-  private getter logger : Invidious::LogHandler
   private getter config : Config
 
-  def initialize(@logger, @config)
+  def initialize(@config)
   end
 
   def begin
@@ -127,7 +126,7 @@ class Invidious::Jobs::BypassCaptchaJob < Invidious::Jobs::BaseJob
           end
         end
       rescue ex
-        logger.error("BypassCaptchaJob: #{ex.message}")
+        LOGGER.error("BypassCaptchaJob: #{ex.message}")
       ensure
         sleep 1.minute
         Fiber.yield
diff --git a/src/invidious/jobs/refresh_channels_job.cr b/src/invidious/jobs/refresh_channels_job.cr
index 6c858afa..0c6ef79e 100644
--- a/src/invidious/jobs/refresh_channels_job.cr
+++ b/src/invidious/jobs/refresh_channels_job.cr
@@ -1,9 +1,8 @@
 class Invidious::Jobs::RefreshChannelsJob < Invidious::Jobs::BaseJob
   private getter db : DB::Database
-  private getter logger : Invidious::LogHandler
   private getter config : Config
 
-  def initialize(@db, @logger, @config)
+  def initialize(@db, @config)
   end
 
   def begin
@@ -14,37 +13,37 @@ class Invidious::Jobs::RefreshChannelsJob < Invidious::Jobs::BaseJob
     backoff = 1.seconds
 
     loop do
-      logger.debug("RefreshChannelsJob: Refreshing all channels")
+      LOGGER.debug("RefreshChannelsJob: Refreshing all channels")
       db.query("SELECT id FROM channels ORDER BY updated") do |rs|
         rs.each do
           id = rs.read(String)
 
           if active_fibers >= lim_fibers
-            logger.trace("RefreshChannelsJob: Fiber limit reached, waiting...")
+            LOGGER.trace("RefreshChannelsJob: Fiber limit reached, waiting...")
             if active_channel.receive
-              logger.trace("RefreshChannelsJob: Fiber limit ok, continuing")
+              LOGGER.trace("RefreshChannelsJob: Fiber limit ok, continuing")
               active_fibers -= 1
             end
           end
 
-          logger.trace("RefreshChannelsJob: #{id} : Spawning fiber")
+          LOGGER.trace("RefreshChannelsJob: #{id} : Spawning fiber")
           active_fibers += 1
           spawn do
             begin
-              logger.trace("RefreshChannelsJob: #{id} fiber : Fetching channel")
-              channel = fetch_channel(id, db, logger, config.full_refresh)
+              LOGGER.trace("RefreshChannelsJob: #{id} fiber : Fetching channel")
+              channel = fetch_channel(id, db, config.full_refresh)
 
               lim_fibers = max_fibers
 
-              logger.trace("RefreshChannelsJob: #{id} fiber : Updating DB")
+              LOGGER.trace("RefreshChannelsJob: #{id} fiber : Updating DB")
               db.exec("UPDATE channels SET updated = $1, author = $2, deleted = false WHERE id = $3", Time.utc, channel.author, id)
             rescue ex
-              logger.error("RefreshChannelsJob: #{id} : #{ex.message}")
+              LOGGER.error("RefreshChannelsJob: #{id} : #{ex.message}")
               if ex.message == "Deleted or invalid channel"
                 db.exec("UPDATE channels SET updated = $1, deleted = true WHERE id = $2", Time.utc, id)
               else
                 lim_fibers = 1
-                logger.error("RefreshChannelsJob: #{id} fiber : backing off for #{backoff}s")
+                LOGGER.error("RefreshChannelsJob: #{id} fiber : backing off for #{backoff}s")
                 sleep backoff
                 if backoff < 1.days
                   backoff += backoff
@@ -53,14 +52,14 @@ class Invidious::Jobs::RefreshChannelsJob < Invidious::Jobs::BaseJob
                 end
               end
             ensure
-              logger.trace("RefreshChannelsJob: #{id} fiber : Done")
+              LOGGER.trace("RefreshChannelsJob: #{id} fiber : Done")
               active_channel.send(true)
             end
           end
         end
       end
 
-      logger.debug("RefreshChannelsJob: Done, sleeping for one minute")
+      LOGGER.debug("RefreshChannelsJob: Done, sleeping for one minute")
       sleep 1.minute
       Fiber.yield
     end
diff --git a/src/invidious/jobs/refresh_feeds_job.cr b/src/invidious/jobs/refresh_feeds_job.cr
index 208569b8..7b4ccdea 100644
--- a/src/invidious/jobs/refresh_feeds_job.cr
+++ b/src/invidious/jobs/refresh_feeds_job.cr
@@ -1,9 +1,8 @@
 class Invidious::Jobs::RefreshFeedsJob < Invidious::Jobs::BaseJob
   private getter db : DB::Database
-  private getter logger : Invidious::LogHandler
   private getter config : Config
 
-  def initialize(@db, @logger, @config)
+  def initialize(@db, @config)
   end
 
   def begin
@@ -30,14 +29,14 @@ class Invidious::Jobs::RefreshFeedsJob < Invidious::Jobs::BaseJob
               column_array = get_column_array(db, view_name)
               ChannelVideo.type_array.each_with_index do |name, i|
                 if name != column_array[i]?
-                  logger.info("RefreshFeedsJob: DROP MATERIALIZED VIEW #{view_name}")
+                  LOGGER.info("RefreshFeedsJob: DROP MATERIALIZED VIEW #{view_name}")
                   db.exec("DROP MATERIALIZED VIEW #{view_name}")
                   raise "view does not exist"
                 end
               end
 
               if !db.query_one("SELECT pg_get_viewdef('#{view_name}')", as: String).includes? "WHERE ((cv.ucid = ANY (u.subscriptions))"
-                logger.info("RefreshFeedsJob: Materialized view #{view_name} is out-of-date, recreating...")
+                LOGGER.info("RefreshFeedsJob: Materialized view #{view_name} is out-of-date, recreating...")
                 db.exec("DROP MATERIALIZED VIEW #{view_name}")
               end
 
@@ -49,18 +48,18 @@ class Invidious::Jobs::RefreshFeedsJob < Invidious::Jobs::BaseJob
                 legacy_view_name = "subscriptions_#{sha256(email)[0..7]}"
 
                 db.exec("SELECT * FROM #{legacy_view_name} LIMIT 0")
-                logger.info("RefreshFeedsJob: RENAME MATERIALIZED VIEW #{legacy_view_name}")
+                LOGGER.info("RefreshFeedsJob: RENAME MATERIALIZED VIEW #{legacy_view_name}")
                 db.exec("ALTER MATERIALIZED VIEW #{legacy_view_name} RENAME TO #{view_name}")
               rescue ex
                 begin
                   # While iterating through, we may have an email stored from a deleted account
                   if db.query_one?("SELECT true FROM users WHERE email = $1", email, as: Bool)
-                    logger.info("RefreshFeedsJob: CREATE #{view_name}")
+                    LOGGER.info("RefreshFeedsJob: CREATE #{view_name}")
                     db.exec("CREATE MATERIALIZED VIEW #{view_name} AS #{MATERIALIZED_VIEW_SQL.call(email)}")
                     db.exec("UPDATE users SET feed_needs_update = false WHERE email = $1", email)
                   end
                 rescue ex
-                  logger.error("RefreshFeedJobs: REFRESH #{email} : #{ex.message}")
+                  LOGGER.error("RefreshFeedJobs: REFRESH #{email} : #{ex.message}")
                 end
               end
             end
diff --git a/src/invidious/jobs/subscribe_to_feeds_job.cr b/src/invidious/jobs/subscribe_to_feeds_job.cr
index 2255730d..750aceb8 100644
--- a/src/invidious/jobs/subscribe_to_feeds_job.cr
+++ b/src/invidious/jobs/subscribe_to_feeds_job.cr
@@ -1,10 +1,9 @@
 class Invidious::Jobs::SubscribeToFeedsJob < Invidious::Jobs::BaseJob
   private getter db : DB::Database
-  private getter logger : Invidious::LogHandler
   private getter hmac_key : String
   private getter config : Config
 
-  def initialize(@db, @logger, @config, @hmac_key)
+  def initialize(@db, @config, @hmac_key)
   end
 
   def begin
@@ -34,10 +33,10 @@ class Invidious::Jobs::SubscribeToFeedsJob < Invidious::Jobs::BaseJob
               response = subscribe_pubsub(ucid, hmac_key, config)
 
               if response.status_code >= 400
-                logger.error("SubscribeToFeedsJob: #{ucid} : #{response.body}")
+                LOGGER.error("SubscribeToFeedsJob: #{ucid} : #{response.body}")
               end
             rescue ex
-              logger.error("SubscribeToFeedsJob: #{ucid} : #{ex.message}")
+              LOGGER.error("SubscribeToFeedsJob: #{ucid} : #{ex.message}")
             end
 
             active_channel.send(true)
diff --git a/src/invidious/jobs/update_decrypt_function_job.cr b/src/invidious/jobs/update_decrypt_function_job.cr
index 0a6c09c5..6fa0ae1b 100644
--- a/src/invidious/jobs/update_decrypt_function_job.cr
+++ b/src/invidious/jobs/update_decrypt_function_job.cr
@@ -1,15 +1,10 @@
 class Invidious::Jobs::UpdateDecryptFunctionJob < Invidious::Jobs::BaseJob
-  private getter logger : Invidious::LogHandler
-
-  def initialize(@logger)
-  end
-
   def begin
     loop do
       begin
         DECRYPT_FUNCTION.update_decrypt_function
       rescue ex
-        logger.error("UpdateDecryptFunctionJob : #{ex.message}")
+        LOGGER.error("UpdateDecryptFunctionJob : #{ex.message}")
       ensure
         sleep 1.minute
         Fiber.yield
diff --git a/src/invidious/routes/base_route.cr b/src/invidious/routes/base_route.cr
index 2852cb04..37624267 100644
--- a/src/invidious/routes/base_route.cr
+++ b/src/invidious/routes/base_route.cr
@@ -1,7 +1,6 @@
 abstract class Invidious::Routes::BaseRoute
   private getter config : Config
-  private getter logger : Invidious::LogHandler
 
-  def initialize(@config, @logger)
+  def initialize(@config)
   end
 end
diff --git a/src/invidious/routes/login.cr b/src/invidious/routes/login.cr
index 42fb4676..45a6d4d8 100644
--- a/src/invidious/routes/login.cr
+++ b/src/invidious/routes/login.cr
@@ -267,7 +267,7 @@ class Invidious::Routes::Login < Invidious::Routes::BaseRoute
           raise "Couldn't get SID."
         end
 
-        user, sid = get_user(sid, headers, PG_DB, logger)
+        user, sid = get_user(sid, headers, PG_DB)
 
         # We are now logged in
         traceback << "done.<br/>"
diff --git a/src/invidious/routes/watch.cr b/src/invidious/routes/watch.cr
index a5c05c00..65604a88 100644
--- a/src/invidious/routes/watch.cr
+++ b/src/invidious/routes/watch.cr
@@ -62,7 +62,7 @@ class Invidious::Routes::Watch < Invidious::Routes::BaseRoute
     rescue ex : VideoRedirect
       return env.redirect env.request.resource.gsub(id, ex.video_id)
     rescue ex
-      logger.error("get_video: #{id} : #{ex.message}")
+      LOGGER.error("get_video: #{id} : #{ex.message}")
       return error_template(500, ex)
     end
 
diff --git a/src/invidious/routing.cr b/src/invidious/routing.cr
index 602e6ae5..593c7372 100644
--- a/src/invidious/routing.cr
+++ b/src/invidious/routing.cr
@@ -1,14 +1,14 @@
 module Invidious::Routing
   macro get(path, controller, method = :handle)
     get {{ path }} do |env|
-      controller_instance = {{ controller }}.new(config, logger)
+      controller_instance = {{ controller }}.new(config)
       controller_instance.{{ method.id }}(env)
     end
   end
 
   macro post(path, controller, method = :handle)
     post {{ path }} do |env|
-      controller_instance = {{ controller }}.new(config, logger)
+      controller_instance = {{ controller }}.new(config)
       controller_instance.{{ method.id }}(env)
     end
   end
diff --git a/src/invidious/users.cr b/src/invidious/users.cr
index 5dc16edd..6bc2acd2 100644
--- a/src/invidious/users.cr
+++ b/src/invidious/users.cr
@@ -269,12 +269,12 @@ struct Preferences
   end
 end
 
-def get_user(sid, headers, db, logger, refresh = true)
+def get_user(sid, headers, db, refresh = true)
   if email = db.query_one?("SELECT email FROM session_ids WHERE id = $1", sid, as: String)
     user = db.query_one("SELECT * FROM users WHERE email = $1", email, as: User)
 
     if refresh && Time.utc - user.updated > 1.minute
-      user, sid = fetch_user(sid, headers, db, logger)
+      user, sid = fetch_user(sid, headers, db)
       user_array = user.to_a
       user_array[4] = user_array[4].to_json # User preferences
       args = arg_array(user_array)
@@ -292,7 +292,7 @@ def get_user(sid, headers, db, logger, refresh = true)
       end
     end
   else
-    user, sid = fetch_user(sid, headers, db, logger)
+    user, sid = fetch_user(sid, headers, db)
     user_array = user.to_a
     user_array[4] = user_array[4].to_json # User preferences
     args = arg_array(user.to_a)
@@ -313,7 +313,7 @@ def get_user(sid, headers, db, logger, refresh = true)
   return user, sid
 end
 
-def fetch_user(sid, headers, db, logger)
+def fetch_user(sid, headers, db)
   feed = YT_POOL.client &.get("/subscription_manager?disable_polymer=1", headers)
   feed = XML.parse_html(feed.body)
 
@@ -326,7 +326,7 @@ def fetch_user(sid, headers, db, logger)
     end
   end
 
-  channels = get_batch_channels(channels, db, logger, false, false)
+  channels = get_batch_channels(channels, db, false, false)
 
   email = feed.xpath_node(%q(//a[@class="yt-masthead-picker-header yt-masthead-picker-active-account"]))
   if email

From df9e7f284c95c151a006eedf6be2ead49987e778 Mon Sep 17 00:00:00 2001
From: saltycrys <73420320+saltycrys@users.noreply.github.com>
Date: Mon, 4 Jan 2021 17:05:45 +0100
Subject: [PATCH 3/3] Adjust log verbosity

The default log level has been changed from `debug` to `info`.
The `debug` log level is now more verbose: it gives a general overview of what
is happening (where implemented), while `trace` reports all available details.
---
 src/invidious/channels.cr                  | 1 +
 src/invidious/helpers/helpers.cr           | 2 +-
 src/invidious/jobs/refresh_channels_job.cr | 4 ++--
 3 files changed, 4 insertions(+), 3 deletions(-)

diff --git a/src/invidious/channels.cr b/src/invidious/channels.cr
index 274e86f9..9986fe1b 100644
--- a/src/invidious/channels.cr
+++ b/src/invidious/channels.cr
@@ -203,6 +203,7 @@ def get_channel(id, db, refresh = true, pull_all_videos = true)
 end
 
 def fetch_channel(ucid, db, pull_all_videos = true, locale = nil)
+  LOGGER.debug("fetch_channel: #{ucid}")
   LOGGER.trace("fetch_channel: #{ucid} : pull_all_videos = #{pull_all_videos}, locale = #{locale}")
 
   LOGGER.trace("fetch_channel: #{ucid} : Downloading RSS feed")
diff --git a/src/invidious/helpers/helpers.cr b/src/invidious/helpers/helpers.cr
index cf965dad..e41f8317 100644
--- a/src/invidious/helpers/helpers.cr
+++ b/src/invidious/helpers/helpers.cr
@@ -67,7 +67,7 @@ class Config
   property channel_threads : Int32 = 1             # Number of threads to use for crawling videos from channels (for updating subscriptions)
   property feed_threads : Int32 = 1                # Number of threads to use for updating feeds
   property output : String = "STDOUT"              # Log file path or STDOUT
-  property log_level : LogLevel = LogLevel::Debug  # Default log level, valid YAML values are ints and strings, see src/invidious/helpers/logger.cr
+  property log_level : LogLevel = LogLevel::Info   # Default log level, valid YAML values are ints and strings, see src/invidious/helpers/logger.cr
   property db : DBConfig                           # Database configuration
   property decrypt_polling : Bool = true           # Use polling to keep decryption function up to date
   property full_refresh : Bool = false             # Used for crawling channels: threads should check all videos uploaded by a channel
diff --git a/src/invidious/jobs/refresh_channels_job.cr b/src/invidious/jobs/refresh_channels_job.cr
index 0c6ef79e..3e94a56e 100644
--- a/src/invidious/jobs/refresh_channels_job.cr
+++ b/src/invidious/jobs/refresh_channels_job.cr
@@ -26,7 +26,7 @@ class Invidious::Jobs::RefreshChannelsJob < Invidious::Jobs::BaseJob
             end
           end
 
-          LOGGER.trace("RefreshChannelsJob: #{id} : Spawning fiber")
+          LOGGER.debug("RefreshChannelsJob: #{id} : Spawning fiber")
           active_fibers += 1
           spawn do
             begin
@@ -52,7 +52,7 @@ class Invidious::Jobs::RefreshChannelsJob < Invidious::Jobs::BaseJob
                 end
               end
             ensure
-              LOGGER.trace("RefreshChannelsJob: #{id} fiber : Done")
+              LOGGER.debug("RefreshChannelsJob: #{id} fiber : Done")
               active_channel.send(true)
             end
           end