From 8c944815bcb1630739f0f5ba1994e051e67527e7 Mon Sep 17 00:00:00 2001
From: Omar Roth
Date: Fri, 7 Jun 2019 19:56:41 -0500
Subject: [PATCH] Minor refactor
---
assets/js/embed.js | 6 +-
assets/js/notifications.js | 11 ++--
assets/js/player.js | 28 +++++-----
assets/js/subscribe_widget.js | 10 ++--
assets/js/watch.js | 30 +++++-----
assets/js/watched_widget.js | 2 +-
src/invidious.cr | 73 ++++++++++++++-----------
src/invidious/channels.cr | 32 ++---------
src/invidious/comments.cr | 5 +-
src/invidious/helpers/helpers.cr | 8 +--
src/invidious/helpers/jobs.cr | 4 +-
src/invidious/helpers/logger.cr | 4 +-
src/invidious/helpers/tokens.cr | 8 +--
src/invidious/helpers/utils.cr | 39 +++++++++++--
src/invidious/playlists.cr | 2 +-
src/invidious/users.cr | 12 ++--
src/invidious/videos.cr | 6 +-
src/invidious/views/components/item.ecr | 12 ++--
18 files changed, 154 insertions(+), 138 deletions(-)
diff --git a/assets/js/embed.js b/assets/js/embed.js
index d2116b2e..283bc06d 100644
--- a/assets/js/embed.js
+++ b/assets/js/embed.js
@@ -1,5 +1,5 @@
-function get_playlist(plid, timeouts = 0) {
- if (timeouts > 10) {
+function get_playlist(plid, timeouts = 1) {
+ if (timeouts >= 10) {
console.log('Failed to pull playlist');
return;
}
@@ -52,7 +52,7 @@ function get_playlist(plid, timeouts = 0) {
}
xhr.ontimeout = function () {
- console.log('Pulling playlist timed out.');
+ console.log('Pulling playlist timed out... ' + timeouts + '/10');
get_playlist(plid, timeouts++);
}
}
diff --git a/assets/js/notifications.js b/assets/js/notifications.js
index 90b8c4f0..7a112350 100644
--- a/assets/js/notifications.js
+++ b/assets/js/notifications.js
@@ -1,7 +1,7 @@
var notifications, delivered;
-function get_subscriptions(callback, failures = 1) {
- if (failures >= 10) {
+function get_subscriptions(callback, timeouts = 1) {
+ if (timeouts >= 10) {
return
}
@@ -16,16 +16,13 @@ function get_subscriptions(callback, failures = 1) {
if (xhr.status === 200) {
subscriptions = xhr.response;
callback(subscriptions);
- } else {
- console.log('Pulling subscriptions failed... ' + failures + '/10');
- get_subscriptions(callback, failures++)
}
}
}
xhr.ontimeout = function () {
- console.log('Pulling subscriptions failed... ' + failures + '/10');
- get_subscriptions(callback, failures++);
+ console.log('Pulling subscriptions timed out... ' + timeouts + '/10');
+ get_subscriptions(callback, timeouts + 1);
}
}
diff --git a/assets/js/player.js b/assets/js/player.js
index 82372185..2b546ff4 100644
--- a/assets/js/player.js
+++ b/assets/js/player.js
@@ -1,20 +1,20 @@
var options = {
- preload: "auto",
+ preload: 'auto',
liveui: true,
playbackRates: [0.25, 0.5, 0.75, 1.0, 1.25, 1.5, 2.0],
controlBar: {
children: [
- "playToggle",
- "volumePanel",
- "currentTimeDisplay",
- "timeDivider",
- "durationDisplay",
- "progressControl",
- "remainingTimeDisplay",
- "captionsButton",
- "qualitySelector",
- "playbackRateMenuButton",
- "fullscreenToggle"
+ 'playToggle',
+ 'volumePanel',
+ 'currentTimeDisplay',
+ 'timeDivider',
+ 'durationDisplay',
+ 'progressControl',
+ 'remainingTimeDisplay',
+ 'captionsButton',
+ 'qualitySelector',
+ 'playbackRateMenuButton',
+ 'fullscreenToggle'
]
}
}
@@ -29,7 +29,7 @@ short_url = location.origin + '/' + video_data.id + embed_url.search;
embed_url = location.origin + '/embed/' + video_data.id + embed_url.search;
var shareOptions = {
- socials: ["fbFeed", "tw", "reddit", "email"],
+ socials: ['fbFeed', 'tw', 'reddit', 'email'],
url: short_url,
title: player_data.title,
@@ -38,7 +38,7 @@ var shareOptions = {
embedCode: ""
}
-var player = videojs("player", options, function () {
+var player = videojs('player', options, function () {
this.hotkeys({
volumeStep: 0.1,
seekStep: 5,
diff --git a/assets/js/subscribe_widget.js b/assets/js/subscribe_widget.js
index 8f055e26..7a7f806d 100644
--- a/assets/js/subscribe_widget.js
+++ b/assets/js/subscribe_widget.js
@@ -7,7 +7,7 @@ if (subscribe_button.getAttribute('data-type') === 'subscribe') {
subscribe_button.onclick = unsubscribe;
}
-function subscribe(timeouts = 0) {
+function subscribe(timeouts = 1) {
if (timeouts >= 10) {
console.log('Failed to subscribe.');
return;
@@ -19,7 +19,7 @@ function subscribe(timeouts = 0) {
xhr.responseType = 'json';
xhr.timeout = 20000;
xhr.open('POST', url, true);
- xhr.setRequestHeader("Content-Type", "application/x-www-form-urlencoded");
+ xhr.setRequestHeader('Content-Type', 'application/x-www-form-urlencoded');
xhr.send('csrf_token=' + subscribe_data.csrf_token);
var fallback = subscribe_button.innerHTML;
@@ -36,12 +36,12 @@ function subscribe(timeouts = 0) {
}
xhr.ontimeout = function () {
- console.log('Subscribing timed out.');
+ console.log('Subscribing timed out... ' + timeouts + '/10');
subscribe(timeouts++);
}
}
-function unsubscribe(timeouts = 0) {
+function unsubscribe(timeouts = 1) {
if (timeouts >= 10) {
console.log('Failed to subscribe');
return;
@@ -70,7 +70,7 @@ function unsubscribe(timeouts = 0) {
}
xhr.ontimeout = function () {
- console.log('Unsubscribing timed out.');
+ console.log('Unsubscribing timed out... ' + timeouts + '/10');
unsubscribe(timeouts++);
}
}
diff --git a/assets/js/watch.js b/assets/js/watch.js
index c9cac43b..80da3ee6 100644
--- a/assets/js/watch.js
+++ b/assets/js/watch.js
@@ -109,10 +109,10 @@ function number_with_separator(val) {
return val;
}
-function get_playlist(plid, timeouts = 0) {
+function get_playlist(plid, timeouts = 1) {
playlist = document.getElementById('playlist');
- if (timeouts > 10) {
+ if (timeouts >= 10) {
console.log('Failed to pull playlist');
playlist.innerHTML = '';
return;
@@ -175,18 +175,19 @@ function get_playlist(plid, timeouts = 0) {
}
xhr.ontimeout = function () {
- console.log('Pulling playlist timed out.');
playlist = document.getElementById('playlist');
playlist.innerHTML =
'
';
- get_playlist(plid, timeouts + 1);
+
+ console.log('Pulling playlist timed out... ' + timeouts + '/10');
+ get_playlist(plid, timeouts + 1);
}
}
-function get_reddit_comments(timeouts = 0) {
+function get_reddit_comments(timeouts = 1) {
comments = document.getElementById('comments');
- if (timeouts > 10) {
+ if (timeouts >= 10) {
console.log('Failed to pull comments');
comments.innerHTML = '';
return;
@@ -238,7 +239,8 @@ function get_reddit_comments(timeouts = 0) {
comments.children[0].children[1].children[0].onclick = swap_comments;
} else {
if (video_data.params.comments[1] === 'youtube') {
- get_youtube_comments(timeouts + 1);
+ console.log('Pulling comments timed out... ' + timeouts + '/10');
+ get_youtube_comments(timeouts + 1);
} else {
comments.innerHTML = fallback;
}
@@ -247,15 +249,15 @@ function get_reddit_comments(timeouts = 0) {
}
xhr.ontimeout = function () {
- console.log('Pulling comments timed out.');
- get_reddit_comments(timeouts + 1);
+ console.log('Pulling comments timed out... ' + timeouts + '/10');
+ get_reddit_comments(timeouts + 1);
}
}
-function get_youtube_comments(timeouts = 0) {
+function get_youtube_comments(timeouts = 1) {
comments = document.getElementById('comments');
- if (timeouts > 10) {
+ if (timeouts >= 10) {
console.log('Failed to pull comments');
comments.innerHTML = '';
return;
@@ -303,7 +305,7 @@ function get_youtube_comments(timeouts = 0) {
comments.children[0].children[1].children[0].onclick = swap_comments;
} else {
if (video_data.params.comments[1] === 'youtube') {
- get_youtube_comments(timeouts + 1);
+ get_youtube_comments(timeouts + 1);
} else {
comments.innerHTML = '';
}
@@ -312,10 +314,10 @@ function get_youtube_comments(timeouts = 0) {
}
xhr.ontimeout = function () {
- console.log('Pulling comments timed out.');
comments.innerHTML =
'
';
- get_youtube_comments(timeouts + 1);
+ console.log('Pulling comments timed out... ' + timeouts + '/10');
+ get_youtube_comments(timeouts + 1);
}
}
diff --git a/assets/js/watched_widget.js b/assets/js/watched_widget.js
index 304a7688..280da83a 100644
--- a/assets/js/watched_widget.js
+++ b/assets/js/watched_widget.js
@@ -22,7 +22,7 @@ function mark_watched(target) {
function mark_unwatched(target) {
var tile = target.parentNode.parentNode.parentNode.parentNode.parentNode;
- tile.style.display = "none";
+ tile.style.display = 'none';
var count = document.getElementById('count')
count.innerText = count.innerText - 1;
diff --git a/src/invidious.cr b/src/invidious.cr
index 140001be..dd5bfd9b 100644
--- a/src/invidious.cr
+++ b/src/invidious.cr
@@ -152,7 +152,7 @@ if config.statistics_enabled
},
},
"metadata" => {
- "updatedAt" => Time.now.to_unix,
+ "updatedAt" => Time.utc.to_unix,
"lastChannelRefreshedAt" => PG_DB.query_one?("SELECT updated FROM channels ORDER BY updated DESC LIMIT 1", as: Time).try &.to_unix || 0,
},
}
@@ -1119,7 +1119,7 @@ post "/login" do |env|
if Crypto::Bcrypt::Password.new(user.password.not_nil!) == password.byte_slice(0, 55)
sid = Base64.urlsafe_encode(Random::Secure.random_bytes(32))
- PG_DB.exec("INSERT INTO session_ids VALUES ($1, $2, $3)", sid, email, Time.now)
+ PG_DB.exec("INSERT INTO session_ids VALUES ($1, $2, $3)", sid, email, Time.utc)
if Kemal.config.ssl || config.https_only
secure = true
@@ -1128,10 +1128,10 @@ post "/login" do |env|
end
if config.domain
- env.response.cookies["SID"] = HTTP::Cookie.new(name: "SID", domain: "#{config.domain}", value: sid, expires: Time.now + 2.years,
+ env.response.cookies["SID"] = HTTP::Cookie.new(name: "SID", domain: "#{config.domain}", value: sid, expires: Time.utc + 2.years,
secure: secure, http_only: true)
else
- env.response.cookies["SID"] = HTTP::Cookie.new(name: "SID", value: sid, expires: Time.now + 2.years,
+ env.response.cookies["SID"] = HTTP::Cookie.new(name: "SID", value: sid, expires: Time.utc + 2.years,
secure: secure, http_only: true)
end
else
@@ -1233,7 +1233,7 @@ post "/login" do |env|
args = arg_array(user_array)
PG_DB.exec("INSERT INTO users VALUES (#{args})", user_array)
- PG_DB.exec("INSERT INTO session_ids VALUES ($1, $2, $3)", sid, email, Time.now)
+ PG_DB.exec("INSERT INTO session_ids VALUES ($1, $2, $3)", sid, email, Time.utc)
view_name = "subscriptions_#{sha256(user.email)}"
PG_DB.exec("CREATE MATERIALIZED VIEW #{view_name} AS \
@@ -1248,10 +1248,10 @@ post "/login" do |env|
end
if config.domain
- env.response.cookies["SID"] = HTTP::Cookie.new(name: "SID", domain: "#{config.domain}", value: sid, expires: Time.now + 2.years,
+ env.response.cookies["SID"] = HTTP::Cookie.new(name: "SID", domain: "#{config.domain}", value: sid, expires: Time.utc + 2.years,
secure: secure, http_only: true)
else
- env.response.cookies["SID"] = HTTP::Cookie.new(name: "SID", value: sid, expires: Time.now + 2.years,
+ env.response.cookies["SID"] = HTTP::Cookie.new(name: "SID", value: sid, expires: Time.utc + 2.years,
secure: secure, http_only: true)
end
@@ -1476,10 +1476,10 @@ post "/preferences" do |env|
end
if config.domain
- env.response.cookies["PREFS"] = HTTP::Cookie.new(name: "PREFS", domain: "#{config.domain}", value: preferences, expires: Time.now + 2.years,
+ env.response.cookies["PREFS"] = HTTP::Cookie.new(name: "PREFS", domain: "#{config.domain}", value: preferences, expires: Time.utc + 2.years,
secure: secure, http_only: true)
else
- env.response.cookies["PREFS"] = HTTP::Cookie.new(name: "PREFS", value: preferences, expires: Time.now + 2.years,
+ env.response.cookies["PREFS"] = HTTP::Cookie.new(name: "PREFS", value: preferences, expires: Time.utc + 2.years,
secure: secure, http_only: true)
end
end
@@ -1513,10 +1513,10 @@ get "/toggle_theme" do |env|
end
if config.domain
- env.response.cookies["PREFS"] = HTTP::Cookie.new(name: "PREFS", domain: "#{config.domain}", value: preferences, expires: Time.now + 2.years,
+ env.response.cookies["PREFS"] = HTTP::Cookie.new(name: "PREFS", domain: "#{config.domain}", value: preferences, expires: Time.utc + 2.years,
secure: secure, http_only: true)
else
- env.response.cookies["PREFS"] = HTTP::Cookie.new(name: "PREFS", value: preferences, expires: Time.now + 2.years,
+ env.response.cookies["PREFS"] = HTTP::Cookie.new(name: "PREFS", value: preferences, expires: Time.utc + 2.years,
secure: secure, http_only: true)
end
end
@@ -1719,9 +1719,9 @@ post "/subscription_ajax" do |env|
end
end
- if env.params.query["action_create_subscription_to_channel"]?
+ if env.params.query["action_create_subscription_to_channel"]?.try &.to_i?.try &.== 1
action = "action_create_subscription_to_channel"
- elsif env.params.query["action_remove_subscriptions"]?
+ elsif env.params.query["action_remove_subscriptions"]?.try &.to_i?.try &.== 1
action = "action_remove_subscriptions"
else
next env.redirect referer
@@ -1737,12 +1737,12 @@ post "/subscription_ajax" do |env|
email = user.email
case action
- when .starts_with? "action_create"
+ when "action_create_subscription_to_channel"
if !user.subscriptions.includes? channel_id
get_channel(channel_id, PG_DB, false, false)
PG_DB.exec("UPDATE users SET feed_needs_update = true, subscriptions = array_append(subscriptions, $1) WHERE email = $2", channel_id, email)
end
- when .starts_with? "action_remove"
+ when "action_remove_subscriptions"
PG_DB.exec("UPDATE users SET feed_needs_update = true, subscriptions = array_remove(subscriptions, $1) WHERE email = $2", channel_id, email)
end
@@ -1885,7 +1885,7 @@ post "/data_control" do |env|
env.response.flush
loop do
- env.response.puts %()
+ env.response.puts %()
env.response.flush
sleep (20 + rand(11)).seconds
@@ -2403,7 +2403,7 @@ get "/feed/subscriptions" do |env|
# we know a user has looked at their feed e.g. in the past 10 minutes,
# they've already seen a video posted 20 minutes ago, and don't need
# to be notified.
- PG_DB.exec("UPDATE users SET notifications = $1, updated = $2 WHERE email = $3", [] of String, Time.now,
+ PG_DB.exec("UPDATE users SET notifications = $1, updated = $2 WHERE email = $3", [] of String, Time.utc,
user.email)
user.notifications = [] of String
env.set "user", user
@@ -2439,7 +2439,7 @@ end
get "/feed/channel/:ucid" do |env|
locale = LOCALES[env.get("preferences").as(Preferences).locale]?
- env.response.content_type = "application/atom+xml"
+ env.response.content_type = "text/xml; charset=UTF-8"
ucid = env.params.url["ucid"]
@@ -2513,7 +2513,7 @@ end
get "/feed/private" do |env|
locale = LOCALES[env.get("preferences").as(Preferences).locale]?
- env.response.content_type = "application/atom+xml"
+ env.response.content_type = "text/xml; charset=UTF-8"
token = env.params.query["token"]?
@@ -2557,7 +2557,7 @@ end
get "/feed/playlist/:plid" do |env|
locale = LOCALES[env.get("preferences").as(Preferences).locale]?
- env.response.content_type = "application/atom+xml"
+ env.response.content_type = "text/xml; charset=UTF-8"
plid = env.params.url["plid"]
@@ -2608,17 +2608,21 @@ get "/feed/webhook/:token" do |env|
topic = env.params.query["hub.topic"]
challenge = env.params.query["hub.challenge"]
- if verify_token.starts_with? "v1"
+ case verify_token
+ when .starts_with? "v1"
_, time, nonce, signature = verify_token.split(":")
data = "#{time}:#{nonce}"
- else
+ when .starts_with? "v2"
time, signature = verify_token.split(":")
data = "#{time}"
+ else
+ env.response.status_code = 400
+ next
end
# The hub will sometimes check if we're still subscribed after delivery errors,
# so we reply with a 200 as long as the request hasn't expired
- if Time.now.to_unix - time.to_i > 432000
+ if Time.utc.to_unix - time.to_i > 432000
env.response.status_code = 400
next
end
@@ -2628,11 +2632,17 @@ get "/feed/webhook/:token" do |env|
next
end
- ucid = HTTP::Params.parse(URI.parse(topic).query.not_nil!)["channel_id"]
- PG_DB.exec("UPDATE channels SET subscribed = $1 WHERE id = $2", Time.now, ucid)
+ if ucid = HTTP::Params.parse(URI.parse(topic).query.not_nil!)["channel_id"]?
+ PG_DB.exec("UPDATE channels SET subscribed = $1 WHERE id = $2", Time.utc, ucid)
+ elsif plid = HTTP::Params.parse(URI.parse(topic).query.not_nil!)["playlist_id"]?
+ PG_DB.exec("UPDATE playlists SET subscribed = $1 WHERE id = $2", Time.utc, plid)
+ else
+ env.response.status_code = 400
+ next
+ end
env.response.status_code = 200
- next challenge
+ challenge
end
post "/feed/webhook/:token" do |env|
@@ -3217,11 +3227,10 @@ get "/api/v1/insights/:id" do |env|
session_token = body.match(/'XSRF_TOKEN': "(?[A-Za-z0-9\_\-\=]+)"/).not_nil!["session_token"]
post_req = {
- "session_token" => session_token,
+ session_token: session_token,
}
- post_req = HTTP::Params.encode(post_req)
- response = client.post("/insight_ajax?action_get_statistics_and_data=1&v=#{id}", headers, post_req).body
+ response = client.post("/insight_ajax?action_get_statistics_and_data=1&v=#{id}", headers, form: post_req).body
response = XML.parse(response)
html_content = XML.parse_html(response.xpath_node(%q(//html_content)).not_nil!.content)
@@ -3265,16 +3274,14 @@ get "/api/v1/insights/:id" do |env|
avg_view_duration_seconds = html_content.xpath_node(%q(//div[@id="stats-chart-tab-watch-time"]/span/span[2])).not_nil!.content
avg_view_duration_seconds = decode_length_seconds(avg_view_duration_seconds)
- response = {
+ {
"viewCount" => view_count,
"timeWatchedText" => time_watched,
"subscriptionsDriven" => subscriptions_driven,
"shares" => shares,
"avgViewDurationSeconds" => avg_view_duration_seconds,
"graphData" => graph_data,
- }
-
- next response.to_json
+ }.to_json
end
get "/api/v1/annotations/:id" do |env|
diff --git a/src/invidious/channels.cr b/src/invidious/channels.cr
index 55b8046e..b7b6f553 100644
--- a/src/invidious/channels.cr
+++ b/src/invidious/channels.cr
@@ -138,7 +138,7 @@ def get_channel(id, db, refresh = true, pull_all_videos = true)
if db.query_one?("SELECT EXISTS (SELECT true FROM channels WHERE id = $1)", id, as: Bool)
channel = db.query_one("SELECT * FROM channels WHERE id = $1", id, as: InvidiousChannel)
- if refresh && Time.now - channel.updated > 10.minutes
+ if refresh && Time.utc - channel.updated > 10.minutes
channel = fetch_channel(id, db, pull_all_videos: pull_all_videos)
channel_array = channel.to_a
args = arg_array(channel_array)
@@ -219,7 +219,7 @@ def fetch_channel(ucid, db, pull_all_videos = true, locale = nil)
id: video_id,
title: title,
published: published,
- updated: Time.now,
+ updated: Time.utc,
ucid: ucid,
author: author,
length_seconds: length_seconds,
@@ -282,7 +282,7 @@ def fetch_channel(ucid, db, pull_all_videos = true, locale = nil)
id: video.id,
title: video.title,
published: video.published,
- updated: Time.now,
+ updated: Time.utc,
ucid: video.ucid,
author: video.author,
length_seconds: video.length_seconds,
@@ -296,7 +296,7 @@ def fetch_channel(ucid, db, pull_all_videos = true, locale = nil)
# We are notified of Red videos elsewhere (PubSub), which includes a correct published date,
# so since they don't provide a published date here we can safely ignore them.
- if Time.now - video.published > 1.minute
+ if Time.utc - video.published > 1.minute
emails = db.query_all("UPDATE users SET notifications = notifications || $1 \
WHERE updated < $2 AND $3 = ANY(subscriptions) AND $1 <> ALL(notifications) RETURNING email",
video.id, video.published, video.ucid, as: String)
@@ -332,31 +332,11 @@ def fetch_channel(ucid, db, pull_all_videos = true, locale = nil)
db.exec("DELETE FROM channel_videos * WHERE NOT id = ANY ('{#{ids.map { |id| %("#{id}") }.join(",")}}') AND ucid = $1", ucid)
end
- channel = InvidiousChannel.new(ucid, author, Time.now, false, nil)
+ channel = InvidiousChannel.new(ucid, author, Time.utc, false, nil)
return channel
end
-def subscribe_pubsub(ucid, key, config)
- client = make_client(PUBSUB_URL)
- time = Time.now.to_unix.to_s
- nonce = Random::Secure.hex(4)
- signature = "#{time}:#{nonce}"
-
- host_url = make_host_url(config, Kemal.config)
-
- body = {
- "hub.callback" => "#{host_url}/feed/webhook/v1:#{time}:#{nonce}:#{OpenSSL::HMAC.hexdigest(:sha1, key, signature)}",
- "hub.topic" => "https://www.youtube.com/xml/feeds/videos.xml?channel_id=#{ucid}",
- "hub.verify" => "async",
- "hub.mode" => "subscribe",
- "hub.lease_seconds" => "432000",
- "hub.secret" => key.to_s,
- }
-
- return client.post("/subscribe", form: body)
-end
-
def fetch_channel_playlists(ucid, author, auto_generated, continuation, sort_by)
client = make_client(YT_URL)
@@ -420,7 +400,7 @@ def produce_channel_videos_url(ucid, page = 1, auto_generated = nil, sort_by = "
if auto_generated
seed = Time.unix(1525757349)
- until seed >= Time.now
+ until seed >= Time.utc
seed += 1.month
end
timestamp = seed - (page - 1).months
diff --git a/src/invidious/comments.cr b/src/invidious/comments.cr
index df8d5ca4..a652f84a 100644
--- a/src/invidious/comments.cr
+++ b/src/invidious/comments.cr
@@ -72,9 +72,8 @@ def fetch_youtube_comments(id, db, continuation, proxies, format, locale, thin_m
end
post_req = {
- "session_token" => session_token,
+ session_token: session_token,
}
- post_req = HTTP::Params.encode(post_req)
client = make_client(YT_URL, proxies, video.info["region"]?)
headers = HTTP::Headers.new
@@ -89,7 +88,7 @@ def fetch_youtube_comments(id, db, continuation, proxies, format, locale, thin_m
headers["x-youtube-client-name"] = "1"
headers["x-youtube-client-version"] = "2.20180719"
- response = client.post("/comment_service_ajax?action_get_comments=1&ctoken=#{continuation}&continuation=#{continuation}&hl=en&gl=US", headers, post_req)
+ response = client.post("/comment_service_ajax?action_get_comments=1&ctoken=#{continuation}&continuation=#{continuation}&hl=en&gl=US", headers, form: post_req)
response = JSON.parse(response.body)
if !response["response"]["continuationContents"]?
diff --git a/src/invidious/helpers/helpers.cr b/src/invidious/helpers/helpers.cr
index 2dd50d42..5c5d5bb1 100644
--- a/src/invidious/helpers/helpers.cr
+++ b/src/invidious/helpers/helpers.cr
@@ -146,7 +146,7 @@ def rank_videos(db, n)
published = rs.read(Time)
# Exponential decay, older videos tend to rank lower
- temperature = wilson_score * Math.exp(-0.000005*((Time.now - published).total_minutes))
+ temperature = wilson_score * Math.exp(-0.000005*((Time.utc - published).total_minutes))
top << {temperature, id}
end
end
@@ -346,7 +346,7 @@ def extract_items(nodeset, ucid = nil, author_name = nil)
published ||= Time.unix(metadata[0].xpath_node(%q(.//span)).not_nil!["data-timestamp"].to_i64)
rescue ex
end
- published ||= Time.now
+ published ||= Time.utc
begin
view_count = metadata[0].content.rchop(" watching").delete(",").try &.to_i64?
@@ -676,7 +676,7 @@ def create_notification_stream(env, proxies, config, kemal_config, decrypt_funct
loop do
time_span = [0, 0, 0, 0]
time_span[rand(4)] = rand(30) + 5
- published = Time.now - Time::Span.new(time_span[0], time_span[1], time_span[2], time_span[3])
+ published = Time.utc - Time::Span.new(time_span[0], time_span[1], time_span[2], time_span[3])
video_id = TEST_IDS[rand(TEST_IDS.size)]
video = get_video(video_id, PG_DB, proxies)
@@ -783,7 +783,7 @@ def create_notification_stream(env, proxies, config, kemal_config, decrypt_funct
begin
# Send heartbeat
loop do
- env.response.puts ":keepalive #{Time.now.to_unix}"
+ env.response.puts ":keepalive #{Time.utc.to_unix}"
env.response.puts
env.response.flush
sleep (20 + rand(11)).seconds
diff --git a/src/invidious/helpers/jobs.cr b/src/invidious/helpers/jobs.cr
index ee468d1a..0a5fd7d9 100644
--- a/src/invidious/helpers/jobs.cr
+++ b/src/invidious/helpers/jobs.cr
@@ -22,10 +22,10 @@ def refresh_channels(db, logger, config)
begin
channel = fetch_channel(id, db, config.full_refresh)
- db.exec("UPDATE channels SET updated = $1, author = $2, deleted = false WHERE id = $3", Time.now, channel.author, id)
+ db.exec("UPDATE channels SET updated = $1, author = $2, deleted = false WHERE id = $3", Time.utc, channel.author, id)
rescue ex
if ex.message == "Deleted or invalid channel"
- db.exec("UPDATE channels SET updated = $1, deleted = true WHERE id = $2", Time.now, id)
+ db.exec("UPDATE channels SET updated = $1, deleted = true WHERE id = $2", Time.utc, id)
end
logger.write("#{id} : #{ex.message}\n")
end
diff --git a/src/invidious/helpers/logger.cr b/src/invidious/helpers/logger.cr
index 5bb1eb40..8a458a45 100644
--- a/src/invidious/helpers/logger.cr
+++ b/src/invidious/helpers/logger.cr
@@ -5,9 +5,9 @@ class Invidious::LogHandler < Kemal::BaseLogHandler
end
def call(context : HTTP::Server::Context)
- time = Time.now
+ time = Time.utc
call_next(context)
- elapsed_text = elapsed_text(Time.now - time)
+ elapsed_text = elapsed_text(Time.utc - time)
@io << time << ' ' << context.response.status_code << ' ' << context.request.method << ' ' << context.request.resource << ' ' << elapsed_text << '\n'
diff --git a/src/invidious/helpers/tokens.cr b/src/invidious/helpers/tokens.cr
index ba41cba3..31b70c3b 100644
--- a/src/invidious/helpers/tokens.cr
+++ b/src/invidious/helpers/tokens.cr
@@ -1,6 +1,6 @@
def generate_token(email, scopes, expire, key, db)
session = "v1:#{Base64.urlsafe_encode(Random::Secure.random_bytes(32))}"
- PG_DB.exec("INSERT INTO session_ids VALUES ($1, $2, $3)", session, email, Time.now)
+ PG_DB.exec("INSERT INTO session_ids VALUES ($1, $2, $3)", session, email, Time.utc)
token = {
"session" => session,
@@ -18,7 +18,7 @@ def generate_token(email, scopes, expire, key, db)
end
def generate_response(session, scopes, key, db, expire = 6.hours, use_nonce = false)
- expire = Time.now + expire
+ expire = Time.utc + expire
token = {
"session" => session,
@@ -85,7 +85,7 @@ def validate_request(token, session, request, key, db, locale = nil)
end
if token["nonce"]? && (nonce = db.query_one?("SELECT * FROM nonces WHERE nonce = $1", token["nonce"], as: {String, Time}))
- if nonce[1] > Time.now
+ if nonce[1] > Time.utc
db.exec("UPDATE nonces SET expire = $1 WHERE nonce = $2", Time.new(1990, 1, 1), nonce[0])
else
raise translate(locale, "Erroneous token")
@@ -100,7 +100,7 @@ def validate_request(token, session, request, key, db, locale = nil)
end
expire = token["expire"]?.try &.as_i
- if expire.try &.< Time.now.to_unix
+ if expire.try &.< Time.utc.to_unix
raise translate(locale, "Token is expired, please try again")
end
diff --git a/src/invidious/helpers/utils.cr b/src/invidious/helpers/utils.cr
index fcccb7f9..37cc2eb8 100644
--- a/src/invidious/helpers/utils.cr
+++ b/src/invidious/helpers/utils.cr
@@ -110,9 +110,9 @@ def decode_date(string : String)
case string
when "today"
- return Time.now
+ return Time.utc
when "yesterday"
- return Time.now - 1.day
+ return Time.utc - 1.day
end
# String matches format "20 hours ago", "4 months ago"...
@@ -138,11 +138,11 @@ def decode_date(string : String)
raise "Could not parse #{string}"
end
- return Time.now - delta
+ return Time.utc - delta
end
def recode_date(time : Time, locale)
- span = Time.now - time
+ span = Time.utc - time
if span.total_days > 365.0
span = translate(locale, "`x` years", (span.total_days.to_i / 365).to_s)
@@ -327,3 +327,34 @@ def sha256(text)
digest << text
return digest.hexdigest
end
+
+def subscribe_pubsub(topic, key, config)
+ case topic
+ when .match(/^UC[A-Za-z0-9_-]{22}$/)
+ topic = "channel_id=#{topic}"
+ when .match(/^(?:PL|LL|EC|UU|FL|UL|OLAK5uy_)[0-9A-Za-z-_]{10,}$/)
+ # There's a couple missing from the above regex, namely TL and RD, which
+ # don't have feeds
+ topic = "playlist_id=#{topic}"
+ else
+ # TODO
+ end
+
+ client = make_client(PUBSUB_URL)
+ time = Time.utc.to_unix.to_s
+ nonce = Random::Secure.hex(4)
+ signature = "#{time}:#{nonce}"
+
+ host_url = make_host_url(config, Kemal.config)
+
+ body = {
+ "hub.callback" => "#{host_url}/feed/webhook/v1:#{time}:#{nonce}:#{OpenSSL::HMAC.hexdigest(:sha1, key, signature)}",
+ "hub.topic" => "https://www.youtube.com/xml/feeds/videos.xml?#{topic}",
+ "hub.verify" => "async",
+ "hub.mode" => "subscribe",
+ "hub.lease_seconds" => "432000",
+ "hub.secret" => key.to_s,
+ }
+
+ return client.post("/subscribe", form: body)
+end
diff --git a/src/invidious/playlists.cr b/src/invidious/playlists.cr
index 92d9b977..2b3f731e 100644
--- a/src/invidious/playlists.cr
+++ b/src/invidious/playlists.cr
@@ -208,7 +208,7 @@ def fetch_playlist(plid, locale)
if updated
updated = decode_date(updated)
else
- updated = Time.now
+ updated = Time.utc
end
playlist = Playlist.new(
diff --git a/src/invidious/users.cr b/src/invidious/users.cr
index 4ad11905..37f3b4f1 100644
--- a/src/invidious/users.cr
+++ b/src/invidious/users.cr
@@ -133,7 +133,7 @@ def get_user(sid, headers, db, refresh = true)
if email = db.query_one?("SELECT email FROM session_ids WHERE id = $1", sid, as: String)
user = db.query_one("SELECT * FROM users WHERE email = $1", email, as: User)
- if refresh && Time.now - user.updated > 1.minute
+ if refresh && Time.utc - user.updated > 1.minute
user, sid = fetch_user(sid, headers, db)
user_array = user.to_a
@@ -144,7 +144,7 @@ def get_user(sid, headers, db, refresh = true)
ON CONFLICT (email) DO UPDATE SET updated = $1, subscriptions = $3", user_array)
db.exec("INSERT INTO session_ids VALUES ($1,$2,$3) \
- ON CONFLICT (id) DO NOTHING", sid, user.email, Time.now)
+ ON CONFLICT (id) DO NOTHING", sid, user.email, Time.utc)
begin
view_name = "subscriptions_#{sha256(user.email)}"
@@ -166,7 +166,7 @@ def get_user(sid, headers, db, refresh = true)
ON CONFLICT (email) DO UPDATE SET updated = $1, subscriptions = $3", user_array)
db.exec("INSERT INTO session_ids VALUES ($1,$2,$3) \
- ON CONFLICT (id) DO NOTHING", sid, user.email, Time.now)
+ ON CONFLICT (id) DO NOTHING", sid, user.email, Time.utc)
begin
view_name = "subscriptions_#{sha256(user.email)}"
@@ -206,7 +206,7 @@ def fetch_user(sid, headers, db)
token = Base64.urlsafe_encode(Random::Secure.random_bytes(32))
- user = User.new(Time.now, [] of String, channels, email, CONFIG.default_user_preferences, nil, token, [] of String, true)
+ user = User.new(Time.utc, [] of String, channels, email, CONFIG.default_user_preferences, nil, token, [] of String, true)
return user, sid
end
@@ -214,7 +214,7 @@ def create_user(sid, email, password)
password = Crypto::Bcrypt::Password.create(password, cost: 10)
token = Base64.urlsafe_encode(Random::Secure.random_bytes(32))
- user = User.new(Time.now, [] of String, [] of String, email, CONFIG.default_user_preferences, password.to_s, token, [] of String, true)
+ user = User.new(Time.utc, [] of String, [] of String, email, CONFIG.default_user_preferences, password.to_s, token, [] of String, true)
return user, sid
end
@@ -314,7 +314,7 @@ def subscribe_ajax(channel_id, action, env_headers)
headers["content-type"] = "application/x-www-form-urlencoded"
post_req = {
- "session_token" => session_token,
+ session_token: session_token,
}
post_url = "/subscription_ajax?#{action}=1&c=#{channel_id}"
diff --git a/src/invidious/videos.cr b/src/invidious/videos.cr
index 8f0fda46..6f3b4d43 100644
--- a/src/invidious/videos.cr
+++ b/src/invidious/videos.cr
@@ -852,7 +852,7 @@ def get_video(id, db, proxies = {} of String => Array({ip: String, port: Int32})
video = db.query_one("SELECT * FROM videos WHERE id = $1", id, as: Video)
# If record was last updated over 10 minutes ago, refresh (expire param in response lasts for 6 hours)
- if (refresh && Time.now - video.updated > 10.minutes) || force_refresh
+ if (refresh && Time.utc - video.updated > 10.minutes) || force_refresh
begin
video = fetch_video(id, proxies, region)
video_array = video.to_a
@@ -1166,7 +1166,7 @@ def fetch_video(id, proxies, region)
wilson_score = ci_lower_bound(likes, likes + dislikes)
published = html.xpath_node(%q(//meta[@itemprop="datePublished"])).try &.["content"]
- published ||= Time.now.to_s("%Y-%m-%d")
+ published ||= Time.utc.to_s("%Y-%m-%d")
published = Time.parse(published, "%Y-%m-%d", Time::Location.local)
allowed_regions = html.xpath_node(%q(//meta[@itemprop="regionsAllowed"])).try &.["content"].split(",")
@@ -1218,7 +1218,7 @@ def fetch_video(id, proxies, region)
author_thumbnail = ""
end
- video = Video.new(id, info, Time.now, title, views, likes, dislikes, wilson_score, published, description,
+ video = Video.new(id, info, Time.utc, title, views, likes, dislikes, wilson_score, published, description,
nil, author, ucid, allowed_regions, is_family_friendly, genre, genre_url, license, sub_count_text, author_thumbnail)
return video
diff --git a/src/invidious/views/components/item.ecr b/src/invidious/views/components/item.ecr
index dc2acac9..b73ce8a1 100644
--- a/src/invidious/views/components/item.ecr
+++ b/src/invidious/views/components/item.ecr
@@ -72,9 +72,9 @@
- <% if item.responds_to?(:premiere_timestamp) && item.premiere_timestamp && item.premiere_timestamp.not_nil! > Time.now %>
-
<%= translate(locale, "Premieres in `x`", recode_date((item.premiere_timestamp.as(Time) - Time.now).ago, locale)) %>
- <% elsif Time.now - item.published > 1.minute %>
+ <% if item.responds_to?(:premiere_timestamp) && item.premiere_timestamp && item.premiere_timestamp.not_nil! > Time.utc %>
+ <%= translate(locale, "Premieres in `x`", recode_date((item.premiere_timestamp.as(Time) - Time.utc).ago, locale)) %>
+ <% elsif Time.utc - item.published > 1.minute %>
<%= translate(locale, "Shared `x` ago", recode_date(item.published, locale)) %>
<% else %>
@@ -121,9 +121,9 @@
- <% if item.responds_to?(:premiere_timestamp) && item.premiere_timestamp && item.premiere_timestamp.not_nil! > Time.now %>
-
<%= translate(locale, "Premieres in `x`", recode_date((item.premiere_timestamp.as(Time) - Time.now).ago, locale)) %>
- <% elsif Time.now - item.published > 1.minute %>
+ <% if item.responds_to?(:premiere_timestamp) && item.premiere_timestamp && item.premiere_timestamp.not_nil! > Time.utc %>
+ <%= translate(locale, "Premieres in `x`", recode_date((item.premiere_timestamp.as(Time) - Time.utc).ago, locale)) %>
+ <% elsif Time.utc - item.published > 1.minute %>
<%= translate(locale, "Shared `x` ago", recode_date(item.published, locale)) %>
<% else %>