Fix Style/VerboseBlock issues

syeopite 2021-09-24 19:42:43 -07:00
parent 35d15c7c2b
commit e91421253e
No known key found for this signature in database
GPG key ID: 6FA616E5A5294A82
15 changed files with 44 additions and 44 deletions
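
The pattern applied throughout the diff below, as a minimal sketch: Ameba's Style/VerboseBlock rule flags blocks whose only job is to call a method chain on the block argument, and Crystal's short block syntax (&.method) expresses the same call. The Channel record here is hypothetical and used only for illustration; the real changes operate on Invidious's own types.

```crystal
# Hypothetical record, for illustration only.
record Channel, author : String

channels = [Channel.new("Zed"), Channel.new("alice")]

# Verbose block flagged by Style/VerboseBlock:
channels.sort_by { |channel| channel.author.downcase }

# Equivalent short block syntax this commit switches to:
channels.sort_by(&.author.downcase)
```

Both calls are equivalent; &.author.downcase is sugar for { |x| x.author.downcase }.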


@@ -655,7 +655,7 @@ get "/subscription_manager" do |env|
 end
 subscriptions = PG_DB.query_all("SELECT * FROM channels WHERE id = ANY(#{values})", as: InvidiousChannel)
-subscriptions.sort_by! { |channel| channel.author.downcase }
+subscriptions.sort_by!(&.author.downcase)
 if action_takeout
 if format == "json"
@@ -759,7 +759,7 @@ post "/data_control" do |env|
 body = JSON.parse(body)
 if body["subscriptions"]?
-user.subscriptions += body["subscriptions"].as_a.map { |a| a.as_s }
+user.subscriptions += body["subscriptions"].as_a.map(&.as_s)
 user.subscriptions.uniq!
 user.subscriptions = get_batch_channels(user.subscriptions, PG_DB, false, false)
@@ -768,7 +768,7 @@ post "/data_control" do |env|
 end
 if body["watch_history"]?
-user.watched += body["watch_history"].as_a.map { |a| a.as_s }
+user.watched += body["watch_history"].as_a.map(&.as_s)
 user.watched.uniq!
 PG_DB.exec("UPDATE users SET watched = $1 WHERE email = $2", user.watched, user.email)
 end
@@ -876,12 +876,12 @@ post "/data_control" do |env|
 File.write(tempfile.path, entry.io.gets_to_end)
 db = DB.open("sqlite3://" + tempfile.path)
-user.watched += db.query_all("SELECT url FROM streams", as: String).map { |url| url.lchop("https://www.youtube.com/watch?v=") }
+user.watched += db.query_all("SELECT url FROM streams", as: String).map(&.lchop("https://www.youtube.com/watch?v="))
 user.watched.uniq!
 PG_DB.exec("UPDATE users SET watched = $1 WHERE email = $2", user.watched, user.email)
-user.subscriptions += db.query_all("SELECT url FROM subscriptions", as: String).map { |url| url.lchop("https://www.youtube.com/channel/") }
+user.subscriptions += db.query_all("SELECT url FROM subscriptions", as: String).map(&.lchop("https://www.youtube.com/channel/"))
 user.subscriptions.uniq!
 user.subscriptions = get_batch_channels(user.subscriptions, PG_DB, false, false)


@@ -62,7 +62,7 @@ def get_about_info(ucid, locale)
 description_html = HTML.escape(description).gsub("\n", "<br>")
 is_family_friendly = initdata["microformat"]["microformatDataRenderer"]["familySafe"].as_bool
-allowed_regions = initdata["microformat"]["microformatDataRenderer"]["availableCountries"].as_a.map { |a| a.as_s }
+allowed_regions = initdata["microformat"]["microformatDataRenderer"]["availableCountries"].as_a.map(&.as_s)
 related_channels = [] of AboutRelatedChannel
 else
@@ -84,7 +84,7 @@ def get_about_info(ucid, locale)
 description_html = HTML.escape(description).gsub("\n", "<br>")
 is_family_friendly = initdata["microformat"]["microformatDataRenderer"]["familySafe"].as_bool
-allowed_regions = initdata["microformat"]["microformatDataRenderer"]["availableCountries"].as_a.map { |a| a.as_s }
+allowed_regions = initdata["microformat"]["microformatDataRenderer"]["availableCountries"].as_a.map(&.as_s)
 related_channels = initdata["contents"]["twoColumnBrowseResultsRenderer"]
 .["secondaryContents"]?.try &.["browseSecondaryContentsRenderer"]["contents"][0]?
@@ -149,7 +149,7 @@ def get_about_info(ucid, locale)
 end
 end
 end
-tabs = tabs_json.reject { |node| node["tabRenderer"]?.nil? }.map { |node| node["tabRenderer"]["title"].as_s.downcase }
+tabs = tabs_json.reject { |node| node["tabRenderer"]?.nil? }.map(&.["tabRenderer"].["title"].as_s.downcase)
 end
 sub_count = initdata["header"]["c4TabbedHeaderRenderer"]?.try &.["subscriberCountText"]?.try &.["simpleText"]?.try &.as_s?


@@ -101,7 +101,7 @@ struct ChannelVideo
 def to_tuple
 {% begin %}
 {
-{{*@type.instance_vars.map { |var| var.name }}}
+{{*@type.instance_vars.map(&.name)}}
 }
 {% end %}
 end
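
The ChannelVideo hunk above shows the same shorthand at macro level: @type.instance_vars.map(&.name) inside the {% begin %} block. A rough, self-contained sketch of what that expansion does, with a made-up Point struct standing in for the real type:

```crystal
# Hypothetical struct; to_tuple expands at compile time into {x, y},
# splatting the instance-variable names returned by map(&.name).
struct Point
  property x : Int32
  property y : Int32

  def initialize(@x, @y)
  end

  def to_tuple
    {% begin %}
      {
        {{*@type.instance_vars.map(&.name)}}
      }
    {% end %}
  end
end

p Point.new(1, 2).to_tuple # => {1, 2}
```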


@@ -255,11 +255,11 @@ def extract_channel_community_cursor(continuation)
 .try { |i| Base64.decode(i) }
 .try { |i| IO::Memory.new(i) }
 .try { |i| Protodec::Any.parse(i) }
-.try { |i| i["80226972:0:embedded"]["3:1:base64"].as_h }
+.try(&.["80226972:0:embedded"]["3:1:base64"].as_h)
 if object["53:2:embedded"]?.try &.["3:0:embedded"]?
 object["53:2:embedded"]["3:0:embedded"]["2:0:string"] = object["53:2:embedded"]["3:0:embedded"]
-.try { |i| i["2:0:base64"].as_h }
+.try(&.["2:0:base64"].as_h)
 .try { |i| Protodec::Any.cast_json(i) }
 .try { |i| Protodec::Any.from_json(i) }
 .try { |i| Base64.urlsafe_encode(i, padding: false) }


@@ -89,14 +89,14 @@ def check_table(db, table_name, struct_type = nil)
 struct_array = struct_type.type_array
 column_array = get_column_array(db, table_name)
 column_types = File.read("config/sql/#{table_name}.sql").match(/CREATE TABLE public\.#{table_name}\n\((?<types>[\d\D]*?)\);/)
-.try &.["types"].split(",").map { |line| line.strip }.reject &.starts_with?("CONSTRAINT")
+.try &.["types"].split(",").map(&.strip).reject &.starts_with?("CONSTRAINT")
 return if !column_types
 struct_array.each_with_index do |name, i|
 if name != column_array[i]?
 if !column_array[i]?
-new_column = column_types.select { |line| line.starts_with? name }[0]
+new_column = column_types.select(&.starts_with?(name))[0]
 LOGGER.info("check_table: ALTER TABLE #{table_name} ADD COLUMN #{new_column}")
 db.exec("ALTER TABLE #{table_name} ADD COLUMN #{new_column}")
 next
@@ -104,14 +104,14 @@ def check_table(db, table_name, struct_type = nil)
 # Column doesn't exist
 if !column_array.includes? name
-new_column = column_types.select { |line| line.starts_with? name }[0]
+new_column = column_types.select(&.starts_with?(name))[0]
 db.exec("ALTER TABLE #{table_name} ADD COLUMN #{new_column}")
 end
 # Column exists but in the wrong position, rotate
 if struct_array.includes? column_array[i]
 until name == column_array[i]
-new_column = column_types.select { |line| line.starts_with? column_array[i] }[0]?.try &.gsub("#{column_array[i]}", "#{column_array[i]}_new")
+new_column = column_types.select(&.starts_with?(column_array[i]))[0]?.try &.gsub("#{column_array[i]}", "#{column_array[i]}_new")
 # There's a column we didn't expect
 if !new_column


@@ -173,7 +173,7 @@ module Kemal
 return
 end
-if @cached_files.sum { |element| element[1][:data].bytesize } + (size = File.size(file_path)) < CACHE_LIMIT
+if @cached_files.sum(&.[1].[:data].bytesize) + (size = File.size(file_path)) < CACHE_LIMIT
 data = Bytes.new(size)
 File.open(file_path) do |file|
 file.read(data)
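
The Kemal handler hunk above relies on the shorthand covering the index operator as well: &.[1] stands for { |element| element[1] }, and the chained .[:data] call looks up a named-tuple key. A small stand-alone sketch with made-up cache entries (not the actual @cached_files structure):

```crystal
# Hypothetical cache entries: {path, {data: Bytes, etag: String}} tuples.
cached = [
  {"/app.css", {data: Bytes.new(3), etag: "a"}},
  {"/app.js", {data: Bytes.new(5), etag: "b"}},
]

# Verbose block:
total = cached.sum { |element| element[1][:data].bytesize }

# Short block form: &.[1] indexes the tuple, .[:data] fetches the named-tuple entry.
total = cached.sum(&.[1].[:data].bytesize)

p total # => 8
```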


@@ -46,7 +46,7 @@ def sign_token(key, hash)
 next if key == "signature"
 if value.is_a?(JSON::Any) && value.as_a?
-value = value.as_a.map { |i| i.as_s }
+value = value.as_a.map(&.as_s)
 end
 case value
@@ -82,7 +82,7 @@ def validate_request(token, session, request, key, db, locale = nil)
 raise InfoException.new("Erroneous token")
 end
-scopes = token["scopes"].as_a.map { |v| v.as_s }
+scopes = token["scopes"].as_a.map(&.as_s)
 scope = "#{request.method}:#{request.path.lchop("/api/v1/auth/").lstrip("/")}"
 if !scopes_include_scope(scopes, scope)
 raise InfoException.new("Invalid scope")
@@ -105,11 +105,11 @@ end
 def scope_includes_scope(scope, subset)
 methods, endpoint = scope.split(":")
-methods = methods.split(";").map { |method| method.upcase }.reject { |method| method.empty? }.sort
+methods = methods.split(";").map(&.upcase).reject(&.empty?).sort
 endpoint = endpoint.downcase
 subset_methods, subset_endpoint = subset.split(":")
-subset_methods = subset_methods.split(";").map { |method| method.upcase }.sort
+subset_methods = subset_methods.split(";").map(&.upcase).sort
 subset_endpoint = subset_endpoint.downcase
 if methods.empty?


@@ -72,7 +72,7 @@ def fetch_mix(rdid, video_id, cookies = nil, locale = nil)
 videos += next_page.videos
 end
-videos.uniq! { |video| video.id }
+videos.uniq!(&.id)
 videos = videos.first(50)
 return Mix.new({
 title: mix_title,


@@ -339,7 +339,7 @@ module Invidious::Routes::API::V1::Authenticated
 callback_url = env.params.body["callbackUrl"]?
 expire = env.params.body["expire"]?.try &.to_i?
 when "application/json"
-scopes = env.params.json["scopes"].as(Array).map { |v| v.as_s }
+scopes = env.params.json["scopes"].as(Array).map(&.as_s)
 callback_url = env.params.json["callbackUrl"]?.try &.as(String)
 expire = env.params.json["expire"]?.try &.as(Int64)
 else


@@ -20,7 +20,7 @@ module Invidious::Routes::API::V1::Search
 duration = env.params.query["duration"]?.try &.downcase
 duration ||= ""
-features = env.params.query["features"]?.try &.split(",").map { |feature| feature.downcase }
+features = env.params.query["features"]?.try &.split(",").map(&.downcase)
 features ||= [] of String
 content_type = env.params.query["type"]?.try &.downcase


@@ -30,7 +30,7 @@ module Invidious::Routes::Channels
 end
 end
 items = items.select(&.is_a?(SearchPlaylist)).map(&.as(SearchPlaylist))
-items.each { |item| item.author = "" }
+items.each(&.author=(""))
 else
 sort_options = {"newest", "oldest", "popular"}
 sort_by ||= "newest"
@@ -58,7 +58,7 @@ module Invidious::Routes::Channels
 items, continuation = fetch_channel_playlists(channel.ucid, channel.author, continuation, sort_by)
 items = items.select { |item| item.is_a?(SearchPlaylist) }.map { |item| item.as(SearchPlaylist) }
-items.each { |item| item.author = "" }
+items.each(&.author=(""))
 templated "playlists"
 end
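
The two playlist hunks above use the setter form of the shorthand: items.each(&.author=("")) is equivalent to items.each { |item| item.author = "" }. A minimal sketch with a hypothetical class (the real code clears the author on SearchPlaylist items):

```crystal
# Hypothetical item type; a class, so the setter mutates the stored object.
class PlaylistItem
  property author : String

  def initialize(@author)
  end
end

items = [PlaylistItem.new("channel A"), PlaylistItem.new("channel B")]

# Verbose block:
items.each { |item| item.author = "" }

# Setter shorthand:
items.each(&.author=(""))

p items.map(&.author) # => ["", ""]
```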


@@ -183,7 +183,7 @@ def process_search_query(query, page, user, region)
 sort = "relevance"
 subscriptions = nil
-operators = query.split(" ").select { |a| a.match(/\w+:[\w,]+/) }
+operators = query.split(" ").select(&.match(/\w+:[\w,]+/))
 operators.each do |operator|
 key, value = operator.downcase.split(":")


@@ -248,17 +248,17 @@ def get_subscription_feed(db, user, max_results = 40, page = 1)
 notifications = db.query_all("SELECT * FROM channel_videos WHERE id IN (#{args}) ORDER BY published DESC", args: notifications, as: ChannelVideo)
 videos = [] of ChannelVideo
-notifications.sort_by! { |video| video.published }.reverse!
+notifications.sort_by!(&.published).reverse!
 case user.preferences.sort
 when "alphabetically"
-notifications.sort_by! { |video| video.title }
+notifications.sort_by!(&.title)
 when "alphabetically - reverse"
-notifications.sort_by! { |video| video.title }.reverse!
+notifications.sort_by!(&.title).reverse!
 when "channel name"
-notifications.sort_by! { |video| video.author }
+notifications.sort_by!(&.author)
 when "channel name - reverse"
-notifications.sort_by! { |video| video.author }.reverse!
+notifications.sort_by!(&.author).reverse!
 else nil # Ignore
 end
 else
@@ -279,7 +279,7 @@ def get_subscription_feed(db, user, max_results = 40, page = 1)
 videos = PG_DB.query_all("SELECT DISTINCT ON (ucid) * FROM #{view_name} ORDER BY ucid, published DESC", as: ChannelVideo)
 end
-videos.sort_by! { |video| video.published }.reverse!
+videos.sort_by!(&.published).reverse!
 else
 if user.preferences.unseen_only
 # Only show unwatched
@@ -299,15 +299,15 @@ def get_subscription_feed(db, user, max_results = 40, page = 1)
 case user.preferences.sort
 when "published - reverse"
-videos.sort_by! { |video| video.published }
+videos.sort_by!(&.published)
 when "alphabetically"
-videos.sort_by! { |video| video.title }
+videos.sort_by!(&.title)
 when "alphabetically - reverse"
-videos.sort_by! { |video| video.title }.reverse!
+videos.sort_by!(&.title).reverse!
 when "channel name"
-videos.sort_by! { |video| video.author }
+videos.sort_by!(&.author)
 when "channel name - reverse"
-videos.sort_by! { |video| video.author }.reverse!
+videos.sort_by!(&.author).reverse!
 else nil # Ignore
 end


@@ -880,7 +880,7 @@ def extract_video_info(video_id : String, proxy_region : String? = nil, context_
 primary_results = player_response.try &.["contents"]?.try &.["twoColumnWatchNextResults"]?.try &.["results"]?
 .try &.["results"]?.try &.["contents"]?
-sentiment_bar = primary_results.try &.as_a.select { |object| object["videoPrimaryInfoRenderer"]? }[0]?
+sentiment_bar = primary_results.try &.as_a.select(&.["videoPrimaryInfoRenderer"]?)[0]?
 .try &.["videoPrimaryInfoRenderer"]?
 .try &.["sentimentBar"]?
 .try &.["sentimentBarRenderer"]?
@@ -891,11 +891,11 @@ def extract_video_info(video_id : String, proxy_region : String? = nil, context_
 params["likes"] = JSON::Any.new(likes)
 params["dislikes"] = JSON::Any.new(dislikes)
-params["descriptionHtml"] = JSON::Any.new(primary_results.try &.as_a.select { |object| object["videoSecondaryInfoRenderer"]? }[0]?
+params["descriptionHtml"] = JSON::Any.new(primary_results.try &.as_a.select(&.["videoSecondaryInfoRenderer"]?)[0]?
 .try &.["videoSecondaryInfoRenderer"]?.try &.["description"]?.try &.["runs"]?
 .try &.as_a.try { |t| content_to_comment_html(t).gsub("\n", "<br/>") } || "<p></p>")
-metadata = primary_results.try &.as_a.select { |object| object["videoSecondaryInfoRenderer"]? }[0]?
+metadata = primary_results.try &.as_a.select(&.["videoSecondaryInfoRenderer"]?)[0]?
 .try &.["videoSecondaryInfoRenderer"]?
 .try &.["metadataRowContainer"]?
 .try &.["metadataRowContainerRenderer"]?
@@ -928,7 +928,7 @@ def extract_video_info(video_id : String, proxy_region : String? = nil, context_
 end
 end
-author_info = primary_results.try &.as_a.select { |object| object["videoSecondaryInfoRenderer"]? }[0]?
+author_info = primary_results.try &.as_a.select(&.["videoSecondaryInfoRenderer"]?)[0]?
 .try &.["videoSecondaryInfoRenderer"]?.try &.["owner"]?.try &.["videoOwnerRenderer"]?
 params["authorThumbnail"] = JSON::Any.new(author_info.try &.["thumbnail"]?
@@ -1023,13 +1023,13 @@ end
 def process_video_params(query, preferences)
 annotations = query["iv_load_policy"]?.try &.to_i?
 autoplay = query["autoplay"]?.try { |q| (q == "true" || q == "1").to_unsafe }
-comments = query["comments"]?.try &.split(",").map { |a| a.downcase }
+comments = query["comments"]?.try &.split(",").map(&.downcase)
 continue = query["continue"]?.try { |q| (q == "true" || q == "1").to_unsafe }
 continue_autoplay = query["continue_autoplay"]?.try { |q| (q == "true" || q == "1").to_unsafe }
 listen = query["listen"]?.try { |q| (q == "true" || q == "1").to_unsafe }
 local = query["local"]?.try { |q| (q == "true" || q == "1").to_unsafe }
 player_style = query["player_style"]?
-preferred_captions = query["subtitles"]?.try &.split(",").map { |a| a.downcase }
+preferred_captions = query["subtitles"]?.try &.split(",").map(&.downcase)
 quality = query["quality"]?
 quality_dash = query["quality_dash"]?
 region = query["region"]?


@@ -256,7 +256,7 @@ def decrypt_port(p, x)
 p = p.gsub(/\b\w+\b/, x)
 p = p.split(";")
-p = p.map { |item| item.split("=") }
+p = p.map(&.split("="))
 mapping = {} of String => Int32
 p.each do |item|