# See http://www.evanmiller.org/how-not-to-sort-by-average-rating.html
def ci_lower_bound(pos, n)
  if n == 0
    return 0.0
  end

  # z value here represents a confidence level of 0.95
  z = 1.96
  phat = 1.0*pos/n

  return (phat + z*z/(2*n) - z * Math.sqrt((phat*(1 - phat) + z*z/(4*n))/n))/(1 + z*z/n)
end
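
# Illustrative usage (editor's sketch, not part of the original source): 80
# positive ratings out of 100 give a 95% Wilson lower bound of roughly 0.711,
# while a single positive rating carries little confidence.
#
#   ci_lower_bound(80, 100) # => 0.7111... (approximately)
#   ci_lower_bound(1, 1)    # => 0.2065... (approximately)
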
def elapsed_text(elapsed)
  millis = elapsed.total_milliseconds
  return "#{millis.round(2)}ms" if millis >= 1

  "#{(millis * 1000).round(2)}µs"
end
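
# Illustrative usage (editor's sketch; spans built with Crystal's
# Int#milliseconds / Int#microseconds helpers):
#
#   elapsed_text(3.milliseconds)   # => "3.0ms"
#   elapsed_text(250.microseconds) # => "250.0µs"
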
def decode_length_seconds(string)
  length_seconds = string.gsub(/[^0-9:]/, "")
  return 0_i32 if length_seconds.empty?

  length_seconds = length_seconds.split(":").map { |x| x.to_i? || 0 }
  length_seconds = [0] * (3 - length_seconds.size) + length_seconds

  length_seconds = Time::Span.new(
    hours: length_seconds[0],
    minutes: length_seconds[1],
    seconds: length_seconds[2]
  ).total_seconds.to_i32

  return length_seconds
end
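
# Illustrative usage (editor's sketch): missing leading fields are zero-padded,
# and stray non-digit characters are stripped before parsing.
#
#   decode_length_seconds("1:20:05") # => 4805
#   decode_length_seconds("05:10")   # => 310
#   decode_length_seconds("")        # => 0
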
def recode_length_seconds(time)
  if time <= 0
    return ""
  else
    time = time.seconds
    text = "#{time.minutes.to_s.rjust(2, '0')}:#{time.seconds.to_s.rjust(2, '0')}"

    if time.total_hours.to_i > 0
      text = "#{time.total_hours.to_i.to_s.rjust(2, '0')}:#{text}"
    end

    text = text.lchop('0')

    return text
  end
end
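
# Illustrative usage (editor's sketch): the inverse of decode_length_seconds,
# with any leading zero on the most significant field chopped off.
#
#   recode_length_seconds(4805) # => "1:20:05"
#   recode_length_seconds(65)   # => "1:05"
#   recode_length_seconds(0)    # => ""
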
def decode_time(string)
  time = string.try &.to_f?

  if !time
    hours = /(?<hours>\d+)h/.match(string).try &.["hours"].try &.to_f
    hours ||= 0

    minutes = /(?<minutes>\d+)m(?!s)/.match(string).try &.["minutes"].try &.to_f
    minutes ||= 0

    seconds = /(?<seconds>\d+)s/.match(string).try &.["seconds"].try &.to_f
    seconds ||= 0

    millis = /(?<millis>\d+)ms/.match(string).try &.["millis"].try &.to_f
    millis ||= 0

    # `/` rather than `//` here, so that milliseconds contribute fractional
    # seconds instead of being floored away
    time = hours * 3600 + minutes * 60 + seconds + millis / 1000
  end

  return time
end
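
# Illustrative usage (editor's sketch): plain numbers pass through as seconds,
# while "XhYmZs" strings are decomposed by the regexes above.
#
#   decode_time("90")      # => 90.0
#   decode_time("1h20m5s") # => 4805.0
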
def decode_date(string : String)
  # String matches 'YYYY'
  if string.match(/^\d{4}/)
    return Time.utc(string.to_i, 1, 1)
  end

  # Try to parse as format Jul 10, 2000
  begin
    return Time.parse(string, "%b %-d, %Y", Time::Location.local)
  rescue ex
  end

  case string
  when "today"
    return Time.utc
  when "yesterday"
    return Time.utc - 1.day
  else nil # Continue
  end

  # String matches format "20 hours ago", "4 months ago"...
  date = string.split(" ")[-3, 3]
  delta = date[0].to_i

  case date[1]
  when .includes? "second"
    delta = delta.seconds
  when .includes? "minute"
    delta = delta.minutes
  when .includes? "hour"
    delta = delta.hours
  when .includes? "day"
    delta = delta.days
  when .includes? "week"
    delta = delta.weeks
  when .includes? "month"
    delta = delta.months
  when .includes? "year"
    delta = delta.years
  else
    raise "Could not parse #{string}"
  end

  return Time.utc - delta
end
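
# Illustrative usage (editor's sketch; relative dates are resolved against the
# current UTC time):
#
#   decode_date("2019")         # => Time.utc(2019, 1, 1)
#   decode_date("Jul 10, 2000") # => 2000-07-10 in the local time zone
#   decode_date("4 months ago") # => Time.utc - 4.months
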
def recode_date(time : Time, locale)
  span = Time.utc - time

  if span.total_days > 365.0
    return translate_count(locale, "generic_count_years", span.total_days.to_i // 365)
  elsif span.total_days > 30.0
    return translate_count(locale, "generic_count_months", span.total_days.to_i // 30)
  elsif span.total_days > 7.0
    return translate_count(locale, "generic_count_weeks", span.total_days.to_i // 7)
  elsif span.total_hours > 24.0
    return translate_count(locale, "generic_count_days", span.total_days.to_i)
  elsif span.total_minutes > 60.0
    return translate_count(locale, "generic_count_hours", span.total_hours.to_i)
  elsif span.total_seconds > 60.0
    return translate_count(locale, "generic_count_minutes", span.total_minutes.to_i)
  else
    return translate_count(locale, "generic_count_seconds", span.total_seconds.to_i)
  end
end
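
# Illustrative usage (editor's sketch; the exact wording comes from the
# locale's generic_count_* translation strings):
#
#   recode_date(Time.utc - 2.days, locale)  # => e.g. "2 days"
#   recode_date(Time.utc - 3.weeks, locale) # => e.g. "3 weeks"
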
def number_with_separator(number)
  number.to_s.reverse.gsub(/(\d{3})(?=\d)/, "\\1,").reverse
end
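
# Illustrative usage (editor's sketch):
#
#   number_with_separator(1234567) # => "1,234,567"
#   number_with_separator(999)     # => "999"
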
def short_text_to_number(short_text : String) : Int32
  case short_text
  when .ends_with? "M"
    number = short_text.rstrip(" mM").to_f
    number *= 1000000
  when .ends_with? "K"
    number = short_text.rstrip(" kK").to_f
    number *= 1000
  else
    number = short_text.rstrip(" ")
  end

  number = number.to_i

  return number
end
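
# Illustrative usage (editor's sketch): the rough inverse of number_to_short_text.
#
#   short_text_to_number("1.4M") # => 1400000
#   short_text_to_number("327K") # => 327000
#   short_text_to_number("512")  # => 512
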
def number_to_short_text(number)
  separated = number_with_separator(number).gsub(",", ".").split("")
  text = separated.first(2).join

  if separated[2]? && separated[2] != "."
    text += separated[2]
  end

  text = text.rchop(".0")

  if number // 1_000_000_000 != 0
    text += "B"
  elsif number // 1_000_000 != 0
    text += "M"
  elsif number // 1000 != 0
    text += "K"
  end

  text
end
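
# Illustrative usage (editor's sketch): keeps at most one decimal digit and
# appends a K/M/B suffix.
#
#   number_to_short_text(1_234_567) # => "1.2M"
#   number_to_short_text(5_000)     # => "5K"
#   number_to_short_text(999)       # => "999"
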
def arg_array(array, start = 1)
  if array.size == 0
    args = "NULL"
  else
    args = [] of String
    (start..array.size + start - 1).each { |i| args << "($#{i})" }
    args = args.join(",")
  end

  return args
end
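
# Illustrative usage (editor's sketch): builds the placeholder list for a
# PostgreSQL query with positional parameters.
#
#   arg_array(["a", "b", "c"]) # => "($1),($2),($3)"
#   arg_array(["a", "b"], 3)   # => "($3),($4)"
#   arg_array([] of String)    # => "NULL"
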
def make_host_url(kemal_config)
  ssl = CONFIG.https_only || kemal_config.ssl
  port = CONFIG.external_port || kemal_config.port

  if ssl
    scheme = "https://"
  else
    scheme = "http://"
  end

  # Add if non-standard port
  if port != 80 && port != 443
    port = ":#{port}"
  else
    port = ""
  end

  if !CONFIG.domain
    return ""
  end

  host = CONFIG.domain.not_nil!.lchop(".")

  return "#{scheme}#{host}#{port}"
end
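
# Illustrative result (editor's sketch; assumes a hypothetical config with
# domain "invidious.example", https_only enabled, and port 443):
#
#   make_host_url(Kemal.config) # => "https://invidious.example"
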
def get_referer(env, fallback = "/", unroll = true)
  referer = env.params.query["referer"]?
  referer ||= env.request.headers["referer"]?
  referer ||= fallback

  referer = URI.parse(referer)

  # "Unroll" nested referrers
  if unroll
    loop do
      if referer.query
        params = HTTP::Params.parse(referer.query.not_nil!)
        if params["referer"]?
          referer = URI.parse(URI.decode_www_form(params["referer"]))
        else
          break
        end
      else
        break
      end
    end
  end

  referer = referer.request_target
  referer = "/" + referer.gsub(/[^\/?@&%=\-_.0-9a-zA-Z]/, "").lstrip("/\\")

  if referer == env.request.path
    referer = fallback
  end

  return referer
end
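
# Illustrative behavior (editor's sketch): a ?referer= query parameter wins
# over the Referer header, nested referer params are unrolled, and the result
# is sanitized down to a safe local path.
#
#   get_referer(env) # => "/feed/subscriptions" for ?referer=%2Ffeed%2Fsubscriptions
#   get_referer(env) # => fallback when no referer is present
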
def sha256(text)
  digest = OpenSSL::Digest.new("SHA256")
  digest << text
  return digest.final.hexstring
end
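
# Illustrative usage (editor's sketch): returns the digest as a 64-character
# lowercase hex string.
#
#   sha256("some text") # => 64-character hex string
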
def subscribe_pubsub(topic, key)
  case topic
  when .match(/^UC[A-Za-z0-9_-]{22}$/)
    topic = "channel_id=#{topic}"
  when .match(/^(PL|LL|EC|UU|FL|UL|OLAK5uy_)[0-9A-Za-z-_]{10,}$/)
    # A couple of prefixes are missing from the above regex, namely TL and RD,
    # which don't have feeds
    topic = "playlist_id=#{topic}"
  else
    # TODO
  end

  time = Time.utc.to_unix.to_s
  nonce = Random::Secure.hex(4)
  signature = "#{time}:#{nonce}"

  body = {
    "hub.callback"      => "#{HOST_URL}/feed/webhook/v1:#{time}:#{nonce}:#{OpenSSL::HMAC.hexdigest(:sha1, key, signature)}",
    "hub.topic"         => "https://www.youtube.com/xml/feeds/videos.xml?#{topic}",
    "hub.verify"        => "async",
    "hub.mode"          => "subscribe",
    "hub.lease_seconds" => "432000",
    "hub.secret"        => key.to_s,
  }

  return make_client(PUBSUB_URL, &.post("/subscribe", form: body))
end
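
# Illustrative call (editor's sketch; hypothetical channel ID, and HMAC_KEY is
# assumed to be the instance's HMAC secret). Note this performs a real HTTP
# request to the PubSubHubbub hub:
#
#   subscribe_pubsub("UC" + "x" * 22, HMAC_KEY)
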
def parse_range(range)
  if !range
    return 0_i64, nil
  end

  ranges = range.lchop("bytes=").split(',')
  ranges.each do |r|
    start_range, end_range = r.split('-')

    start_range = start_range.to_i64? || 0_i64
    end_range = end_range.to_i64?

    # Only the first range of a multi-range header is honored
    return start_range, end_range
  end

  return 0_i64, nil
end
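
# Illustrative usage (editor's sketch): parses an HTTP Range header value into
# {start, end} byte offsets; an open-ended range yields a nil end.
#
#   parse_range("bytes=0-1023") # => {0, 1023}
#   parse_range("bytes=512-")   # => {512, nil}
#   parse_range(nil)            # => {0, nil}
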
def fetch_random_instance
  begin
    instance_api_client = make_client(URI.parse("https://api.invidious.io"))

    # Timeouts
    instance_api_client.connect_timeout = 10.seconds
    instance_api_client.dns_timeout = 10.seconds

    instance_list = JSON.parse(instance_api_client.get("/instances.json").body).as_a
    instance_api_client.close
  rescue Socket::ConnectError | IO::TimeoutError | JSON::ParseException
    instance_list = [] of JSON::Any
  end

  filtered_instance_list = [] of String

  instance_list.each do |data|
    # TODO Check if current URL is an onion instance and use .onion types if so.
    if data[1]["type"] == "https"
      # Instances can have statistics disabled, but statistics are required for
      # version validation. as_nil? doesn't exist, so we handle the error raised
      # when as_nil fails instead: a null "stats" field means stats are disabled.
      begin
        data[1]["stats"].as_nil
        next
      rescue TypeCastError
      end

      # The stats endpoint could also lack the software dict.
      next if data[1]["stats"]["software"]?.nil?

      # Make sure the instance isn't too outdated.
      if remote_version = data[1]["stats"]?.try &.["software"]?.try &.["version"]
        remote_commit_date = remote_version.as_s.match(/\d{4}\.\d{2}\.\d{2}/)
        next if !remote_commit_date

        remote_commit_date = Time.parse(remote_commit_date[0], "%Y.%m.%d", Time::Location::UTC)
        local_commit_date = Time.parse(CURRENT_VERSION, "%Y.%m.%d", Time::Location::UTC)

        next if (remote_commit_date - local_commit_date).abs.days > 30

        begin
          # "monitor" is null when monitoring is unavailable; as_h then raises
          # TypeCastError and we fall through to the rescue below.
          health = data[1]["monitor"].as_h["dailyRatios"][0].as_h["ratio"]
          filtered_instance_list << data[0].as_s if health.to_s.to_f > 90
        rescue TypeCastError
          # We can't check the health if monitoring is broken, so just add the
          # instance to the list and move on. Ideally we'd ignore any instance
          # with broken health monitoring, but since that error tends to occur
          # on all instances at the same time, we have to skip the check.
          filtered_instance_list << data[0].as_s
        end
      end
    end
  end

  # If for some reason no instances were fetched successfully, fall back to
  # redirect.invidious.io
  if filtered_instance_list.size == 0
    return "redirect.invidious.io"
  end

  return filtered_instance_list.sample(1)[0]
end
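
# Illustrative usage (editor's sketch; the result depends on the live instance
# list, hostname below is hypothetical):
#
#   fetch_random_instance # => e.g. "invidious.example.org", or
#                         #    "redirect.invidious.io" when nothing passes the filters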