2019-11-18 22:28:32 +00:00
|
|
|
require "lsquic"
|
2021-05-24 13:45:50 +00:00
|
|
|
require "db"
|
2019-10-25 16:58:16 +00:00
|
|
|
|
2019-11-18 22:28:32 +00:00
|
|
|
# Populates `request` with the default headers YouTube expects
# (browser-like user-agent, accept headers, API client identification).
# `||=` ensures headers the caller already set are never overwritten.
def add_yt_headers(request)
  request.headers["user-agent"] ||= "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/78.0.3904.97 Safari/537.36"
  request.headers["accept-charset"] ||= "ISO-8859-1,utf-8;q=0.7,*;q=0.7"
  request.headers["accept"] ||= "text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8"
  request.headers["accept-language"] ||= "en-us,en;q=0.5"
  # Requests to the captcha/"sorry" page get only the browser-like headers
  # above — no client identification or cookies.
  return if request.resource.starts_with? "/sorry/index"
  request.headers["x-youtube-client-name"] ||= "1"
  request.headers["x-youtube-client-version"] ||= "2.20200609"
  # Preserve original cookies and add new YT consent cookie for EU servers
  request.headers["cookie"] = "#{request.headers["cookie"]?}; CONSENT=YES+"
  if !CONFIG.cookies.empty?
    # Configured cookies are prepended so they take precedence over any
    # cookie already on the request.
    request.headers["cookie"] = "#{(CONFIG.cookies.map { |c| "#{c.name}=#{c.value}" }).join("; ")}; #{request.headers["cookie"]?}"
  end
end
|
|
|
|
|
2021-04-03 20:11:35 +00:00
|
|
|
# A capacity-bounded pool of reusable connections (QUIC or plain HTTP) to a
# single YouTube endpoint. Broken connections are replaced transparently
# inside `client`.
struct YoutubeConnectionPool
  property! url : URI
  # Maximum number of pooled connections.
  property! capacity : Int32
  # Seconds to wait on checkout before the pool raises.
  property! timeout : Float64
  property pool : DB::Pool(QUIC::Client | HTTP::Client)

  def initialize(url : URI, @capacity = 5, @timeout = 5.0, use_quic = true)
    @url = url
    @pool = build_pool(use_quic)
  end

  # Yields a connection to the block and returns the block's result.
  #
  # With a `region`, a fresh proxied client is built per call and the pool
  # is bypassed entirely. Otherwise a pooled connection is checked out; if
  # the block raises, the connection is presumed broken, closed, replaced
  # with a brand-new QUIC client, and the block is retried once. Either
  # way the connection is released back to the pool.
  def client(region = nil, &block)
    if region
      # NOTE(review): this per-region client is never explicitly closed —
      # presumably reclaimed by GC/finalizer; confirm.
      conn = make_client(url, region)
      response = yield conn
    else
      conn = pool.checkout
      begin
        response = yield conn
      rescue ex
        # The failed connection is in an unknown state: discard it and
        # retry the block exactly once on a fresh client.
        conn.close
        conn = QUIC::Client.new(url)
        conn.family = (url.host == "www.youtube.com") ? CONFIG.force_resolve : Socket::Family::INET
        conn.family = Socket::Family::INET if conn.family == Socket::Family::UNSPEC
        conn.before_request { |r| add_yt_headers(r) } if url.host == "www.youtube.com"
        response = yield conn
      ensure
        pool.release(conn)
      end
    end

    response
  end

  # Constructs the underlying DB::Pool; connections are created lazily
  # (initial_pool_size: 0) up to `capacity`.
  private def build_pool(use_quic)
    DB::Pool(QUIC::Client | HTTP::Client).new(initial_pool_size: 0, max_pool_size: capacity, max_idle_pool_size: capacity, checkout_timeout: timeout) do
      if use_quic
        conn = QUIC::Client.new(url)
      else
        conn = HTTP::Client.new(url)
      end
      # Honor the configured resolution family only for youtube.com;
      # everything else is forced to IPv4.
      conn.family = (url.host == "www.youtube.com") ? CONFIG.force_resolve : Socket::Family::INET
      conn.family = Socket::Family::INET if conn.family == Socket::Family::UNSPEC
      conn.before_request { |r| add_yt_headers(r) } if url.host == "www.youtube.com"
      conn
    end
  end
end
|
|
|
|
|
2018-08-04 20:30:44 +00:00
|
|
|
# See http://www.evanmiller.org/how-not-to-sort-by-average-rating.html
#
# Lower bound of the Wilson score confidence interval for a Bernoulli
# parameter: `pos` positive ratings out of `n` total. Lets items with few
# ratings be ranked fairly against heavily-rated ones.
def ci_lower_bound(pos, n)
  return 0.0 if n == 0

  # z value here represents a confidence level of 0.95
  z = 1.96
  phat = 1.0 * pos / n

  (phat + z * z / (2 * n) - z * Math.sqrt((phat * (1 - phat) + z * z / (4 * n)) / n)) / (1 + z * z / n)
end
|
|
|
|
|
|
|
|
# Formats a Time::Span for humans: milliseconds when the span is at least
# 1ms, microseconds otherwise, rounded to two decimal places.
def elapsed_text(elapsed)
  total_ms = elapsed.total_milliseconds

  if total_ms >= 1
    "#{total_ms.round(2)}ms"
  else
    "#{(total_ms * 1000).round(2)}µs"
  end
end
|
|
|
|
|
2019-06-29 02:17:56 +00:00
|
|
|
# Builds a standalone HTTP client for `url` with YouTube headers and
# 10-second connect/read timeouts. When `region` is given, attempts to
# route through a proxy from PROXY_LIST for that region.
#
# NOTE(review): TLS verification is disabled (insecure context) —
# presumably to tolerate proxies/self-signed endpoints; confirm before
# reusing this helper elsewhere.
def make_client(url : URI, region = nil)
  # TODO: Migrate any applicable endpoints to QUIC
  client = HTTPClient.new(url, OpenSSL::SSL::Context::Client.insecure)
  client.family = (url.host == "www.youtube.com") ? CONFIG.force_resolve : Socket::Family::UNSPEC
  client.before_request { |r| add_yt_headers(r) } if url.host == "www.youtube.com"
  client.read_timeout = 10.seconds
  client.connect_timeout = 10.seconds

  if region
    # Try up to 40 random proxies for the region; the first that accepts
    # the configuration wins. Failures are deliberately swallowed —
    # proxying is best-effort and the unproxied client is still usable.
    PROXY_LIST[region]?.try &.sample(40).each do |proxy|
      begin
        proxy = HTTPProxy.new(proxy_host: proxy[:ip], proxy_port: proxy[:port])
        client.set_proxy(proxy)
        break
      rescue ex
      end
    end
  end

  return client
end
|
|
|
|
|
2020-12-23 05:52:23 +00:00
|
|
|
# Block form of `make_client`: yields a fresh client to the block and
# guarantees the client is closed when the block exits, whether normally
# or by exception. Returns the block's result.
def make_client(url : URI, region = nil, &block)
  conn = make_client(url, region)

  begin
    yield conn
  ensure
    conn.close
  end
end
|
|
|
|
|
2018-08-04 20:30:44 +00:00
|
|
|
# Parses a colon-separated duration ("1:02:03", "2:03", "45") into a total
# number of seconds. Characters other than digits and ":" are stripped
# before parsing.
def decode_length_seconds(string)
  parts = string.gsub(/[^0-9:]/, "").split(":").map { |part| part.to_i }
  # Left-pad with zeros so we always have [hours, minutes, seconds].
  parts = [0] * (3 - parts.size) + parts

  return parts[0] * 3600 + parts[1] * 60 + parts[2]
end
|
|
|
|
|
2018-10-21 01:37:55 +00:00
|
|
|
# Formats a second count as "H:MM:SS" (or "M:SS" under an hour), with the
# leading zero of the first field stripped. Non-positive input yields "".
def recode_length_seconds(time)
  return "" if time <= 0

  span = time.seconds
  text = "#{span.minutes.to_s.rjust(2, '0')}:#{span.seconds.to_s.rjust(2, '0')}"

  if span.total_hours.to_i > 0
    text = "#{span.total_hours.to_i.to_s.rjust(2, '0')}:#{text}"
  end

  return text.lchop('0')
end
|
|
|
|
|
2018-08-04 20:30:44 +00:00
|
|
|
# Parses either a plain number of seconds ("123.5") or a suffixed duration
# string ("1h30m15s500ms") into a number of seconds.
def decode_time(string)
  # Fast path: the whole string is already a number.
  time = string.try &.to_f?

  if !time
    hours = /(?<hours>\d+)h/.match(string).try &.["hours"].try &.to_f
    hours ||= 0

    # `m` must not be followed by `s`, so "ms" is not mistaken for minutes.
    minutes = /(?<minutes>\d+)m(?!s)/.match(string).try &.["minutes"].try &.to_f
    minutes ||= 0

    seconds = /(?<seconds>\d+)s/.match(string).try &.["seconds"].try &.to_f
    seconds ||= 0

    millis = /(?<millis>\d+)ms/.match(string).try &.["millis"].try &.to_f
    millis ||= 0

    # Use float division here: the previous `millis // 1000` floored the
    # quotient, so any sub-second component (e.g. "500ms") was silently
    # discarded instead of contributing 0.5s.
    time = hours * 3600 + minutes * 60 + seconds + millis / 1000
  end

  return time
end
|
|
|
|
|
|
|
|
# Parses the various date formats YouTube emits into a `Time`:
#   * a bare year ("2019")           -> Jan 1 of that year, UTC
#   * "Jul 10, 2000"                 -> parsed in the local time zone
#   * "today" / "yesterday"
#   * relative text ("4 months ago") -> Time.utc minus the delta
# Raises when a relative unit is not recognized.
def decode_date(string : String)
  # String matches 'YYYY'
  # NOTE(review): the regex is prefix-anchored only, so any string
  # starting with four digits takes this branch — confirm intended.
  if string.match(/^\d{4}/)
    return Time.utc(string.to_i, 1, 1)
  end

  # Try to parse as format Jul 10, 2000
  begin
    return Time.parse(string, "%b %-d, %Y", Time::Location.local)
  rescue ex
  end

  case string
  when "today"
    return Time.utc
  when "yesterday"
    return Time.utc - 1.day
  else nil # Continue
  end

  # String matches format "20 hours ago", "4 months ago"...
  # Take the last three words: ["<count>", "<unit>", "ago"].
  date = string.split(" ")[-3, 3]
  delta = date[0].to_i

  # `includes?` tolerates plural/singular unit spellings.
  case date[1]
  when .includes? "second"
    delta = delta.seconds
  when .includes? "minute"
    delta = delta.minutes
  when .includes? "hour"
    delta = delta.hours
  when .includes? "day"
    delta = delta.days
  when .includes? "week"
    delta = delta.weeks
  when .includes? "month"
    delta = delta.months
  when .includes? "year"
    delta = delta.years
  else
    raise "Could not parse #{string}"
  end

  return Time.utc - delta
end
|
|
|
|
|
2019-02-20 14:49:39 +00:00
|
|
|
# Renders the time elapsed since `time` as localized relative text
# ("`x` days", "`x` hours", ...), choosing the largest unit that fits.
# Approximations: a year is 365 days, a month 30 days, a week 7 days.
def recode_date(time : Time, locale)
  span = Time.utc - time

  if span.total_days > 365.0
    span = translate(locale, "`x` years", (span.total_days.to_i // 365).to_s)
  elsif span.total_days > 30.0
    span = translate(locale, "`x` months", (span.total_days.to_i // 30).to_s)
  elsif span.total_days > 7.0
    span = translate(locale, "`x` weeks", (span.total_days.to_i // 7).to_s)
  elsif span.total_hours > 24.0
    span = translate(locale, "`x` days", (span.total_days.to_i).to_s)
  elsif span.total_minutes > 60.0
    span = translate(locale, "`x` hours", (span.total_hours.to_i).to_s)
  elsif span.total_seconds > 60.0
    span = translate(locale, "`x` minutes", (span.total_minutes.to_i).to_s)
  else
    span = translate(locale, "`x` seconds", (span.total_seconds.to_i).to_s)
  end

  return span
end
|
|
|
|
|
|
|
|
# Inserts "," as a thousands separator: 1234567 -> "1,234,567".
# Works by reversing the digit string, inserting a comma after every
# group of three digits that is followed by another digit, then
# reversing back.
def number_with_separator(number)
  reversed = number.to_s.reverse
  grouped = reversed.gsub(/(\d{3})(?=\d)/, "\\1,")
  return grouped.reverse
end
|
|
|
|
|
2019-09-13 01:09:23 +00:00
|
|
|
# Converts YouTube's abbreviated counts ("1.2M", "15K", "523") into an
# Int32. Unsuffixed values are parsed as-is; unknown suffixes raise on
# the final `to_i`.
def short_text_to_number(short_text : String) : Int32
  if short_text.ends_with? "M"
    number = short_text.rstrip(" mM").to_f * 1000000
  elsif short_text.ends_with? "K"
    number = short_text.rstrip(" kK").to_f * 1000
  else
    number = short_text.rstrip(" ")
  end

  return number.to_i
end
|
|
|
|
|
2018-10-19 16:14:26 +00:00
|
|
|
# Inverse-ish of `short_text_to_number`: abbreviates a count to at most
# three significant characters plus a magnitude suffix, e.g.
# 1234567 -> "1.23M", 1000 -> "1K".
def number_to_short_text(number)
  # Reuse the separator logic to find digit-group boundaries, then keep
  # the leading digits around the first group break.
  separated = number_with_separator(number).gsub(",", ".").split("")
  text = separated.first(2).join
  text += separated[2] if separated[2]? && separated[2] != "."

  # "1.0" reads better as "1".
  text = text.rchop(".0")

  if number // 1_000_000_000 != 0
    text += "B"
  elsif number // 1_000_000 != 0
    text += "M"
  elsif number // 1000 != 0
    text += "K"
  end

  text
end
|
|
|
|
|
2018-08-04 20:30:44 +00:00
|
|
|
# Builds a SQL placeholder list for `array`: "($1),($2),($3)", numbering
# from `start`. Returns "NULL" for an empty array.
def arg_array(array, start = 1)
  return "NULL" if array.size == 0

  return (start...start + array.size).map { |i| "($#{i})" }.join(",")
end
|
2018-08-05 04:07:38 +00:00
|
|
|
|
2021-01-23 18:39:04 +00:00
|
|
|
# Builds this instance's externally visible base URL
# ("https://host[:port]") from CONFIG overrides and Kemal's settings.
# Returns "" when no domain is configured.
def make_host_url(kemal_config)
  # CONFIG values take precedence over the running server's settings so
  # reverse-proxied deployments can advertise the public endpoint.
  ssl = CONFIG.https_only || kemal_config.ssl
  port = CONFIG.external_port || kemal_config.port

  if ssl
    scheme = "https://"
  else
    scheme = "http://"
  end

  # Add if non-standard port
  if port != 80 && port != 443
    port = ":#{port}"
  else
    port = ""
  end

  if !CONFIG.domain
    return ""
  end

  # A leading "." (cookie-style domain) is stripped for URL use.
  host = CONFIG.domain.not_nil!.lchop(".")

  return "#{scheme}#{host}#{port}"
end
|
2018-08-09 01:26:02 +00:00
|
|
|
|
2019-06-05 00:58:56 +00:00
|
|
|
# Extracts a safe redirect target from the request: the `referer` query
# parameter, else the Referer header, else `fallback`. The result is
# sanitized to a same-site relative path; a referer equal to the current
# path becomes `fallback` to avoid redirect loops.
def get_referer(env, fallback = "/", unroll = true)
  referer = env.params.query["referer"]?
  referer ||= env.request.headers["referer"]?
  referer ||= fallback

  referer = URI.parse(referer)

  # "Unroll" nested referrers
  # (e.g. "/login?referer=/watch%3Fv%3D...&..." -> the innermost target).
  if unroll
    loop do
      if referer.query
        params = HTTP::Params.parse(referer.query.not_nil!)
        if params["referer"]?
          referer = URI.parse(URI.decode_www_form(params["referer"]))
        else
          break
        end
      else
        break
      end
    end
  end

  referer = referer.request_target
  # Whitelist characters and force a single leading "/" so the result can
  # never be an absolute URL to another origin (open-redirect guard).
  referer = "/" + referer.gsub(/[^\/?@&%=\-_.0-9a-zA-Z]/, "").lstrip("/\\")

  if referer == env.request.path
    referer = fallback
  end

  return referer
end
|
2018-09-04 13:52:30 +00:00
|
|
|
|
2018-10-09 13:40:29 +00:00
|
|
|
# Returns the hex-encoded SHA-256 digest of `text`.
def sha256(text)
  hasher = OpenSSL::Digest.new("SHA256")
  hasher << text

  return hasher.final.hexstring
end
|
2019-06-08 00:56:41 +00:00
|
|
|
|
2021-01-23 18:39:04 +00:00
|
|
|
# Subscribes this instance to YouTube's PubSubHubbub feed for a channel
# ("UC..." id) or playlist id. The callback URL embeds a timestamp, a
# nonce, and an HMAC-SHA1 signature so the webhook handler can
# authenticate incoming pushes. Returns the hub's HTTP response.
def subscribe_pubsub(topic, key)
  case topic
  when .match(/^UC[A-Za-z0-9_-]{22}$/)
    topic = "channel_id=#{topic}"
  when .match(/^(PL|LL|EC|UU|FL|UL|OLAK5uy_)[0-9A-Za-z-_]{10,}$/)
    # There's a couple missing from the above regex, namely TL and RD, which
    # don't have feeds
    topic = "playlist_id=#{topic}"
  else
    # TODO
  end

  time = Time.utc.to_unix.to_s
  nonce = Random::Secure.hex(4)
  signature = "#{time}:#{nonce}"

  body = {
    "hub.callback" => "#{HOST_URL}/feed/webhook/v1:#{time}:#{nonce}:#{OpenSSL::HMAC.hexdigest(:sha1, key, signature)}",
    "hub.topic" => "https://www.youtube.com/xml/feeds/videos.xml?#{topic}",
    "hub.verify" => "async",
    "hub.mode" => "subscribe",
    # Hub re-verification window: 5 days, in seconds.
    "hub.lease_seconds" => "432000",
    "hub.secret" => key.to_s,
  }

  return make_client(PUBSUB_URL, &.post("/subscribe", form: body))
end
|
2019-07-04 20:30:00 +00:00
|
|
|
|
|
|
|
# Parses an HTTP Range header value ("bytes=0-1023") into a
# {start, end} pair of Int64s. Only the first range is honored; `end` is
# nil for an open-ended range ("bytes=500-"). A nil header means "from
# the beginning, to the end".
def parse_range(range)
  return 0_i64, nil if !range

  ranges = range.lchop("bytes=").split(',')
  ranges.each do |spec|
    first_byte, last_byte = spec.split('-')

    # Returns on the first iteration: additional ranges are ignored.
    return (first_byte.to_i64? || 0_i64), last_byte.to_i64?
  end

  return 0_i64, nil
end
|
2019-08-15 16:29:55 +00:00
|
|
|
|
|
|
|
# Migrates the legacy boolean dark-mode preference to a theme name:
# "true" -> "dark", "false" -> "light", blank/nil -> nil. Any other
# value is assumed to already be a theme name and passes through.
def convert_theme(theme)
  return nil if theme.nil? || theme.empty?
  return "dark" if theme == "true"
  return "light" if theme == "false"

  theme
end
|
2021-03-27 01:35:28 +00:00
|
|
|
|
2021-03-27 04:22:46 +00:00
|
|
|
# Picks a random public Invidious instance from api.invidious.io, keeping
# only HTTPS instances whose version is within 30 days of this one and
# whose uptime monitor (when present) reports > 90% daily health.
#
# NOTE(review): `filtered_instance_list.sample(1)[0]` raises IndexError
# when no instance passes the filters — confirm callers tolerate that.
def fetch_random_instance
  instance_list = HTTP::Client.get "https://api.invidious.io/instances.json"
  instance_list = JSON.parse(instance_list.body)

  filtered_instance_list = [] of String
  # Each entry is a [name, details] pair.
  instance_list.as_a.each do |data|
    if data[1]["type"] == "https"
      # Makes sure the instance isn't too outdated.
      remote_version = data[1]["stats"]["software"]["version"]
      remote_commit_date = remote_version.as_s.match(/\d{4}\.\d{2}\.\d{2}/)
      next if !remote_commit_date
      remote_commit_date = Time.parse(remote_commit_date[0], "%Y.%m.%d", Time::Location::UTC)
      local_commit_date = Time.parse(CURRENT_VERSION, "%Y.%m.%d", Time::Location::UTC)

      if (remote_commit_date - local_commit_date).abs.days <= 30
        # as_nil? doesn't exist. Thus we'll have to handle the error raised if
        # as_nil fails: a null "monitor" field means monitoring is broken,
        # a non-null one raises TypeCastError and means it is working.
        begin
          broken_health_monitoring = data[1]["monitor"].as_nil
          broken_health_monitoring = true if broken_health_monitoring.nil?
        rescue TypeCastError
          broken_health_monitoring = false
        end

        if !broken_health_monitoring
          health = data[1]["monitor"].as_h["dailyRatios"][0].as_h["ratio"]
          filtered_instance_list << data[0].as_s if health.to_s.to_f > 90
        else
          # We can't check the health if the monitoring is broken. Thus we'll just add it to the list
          # and move on
          filtered_instance_list << data[0].as_s
        end
      end
    end
  end

  return filtered_instance_list.sample(1)[0]
end
|