Add support for polymer redesign

Omar Roth 2020-06-15 17:33:23 -05:00
parent c1cbdae5ee
commit 1eca969cf6
17 changed files with 634 additions and 886 deletions


@@ -7,23 +7,6 @@ CREATE TABLE public.videos
     id text NOT NULL,
     info text,
     updated timestamp with time zone,
-    title text,
-    views bigint,
-    likes integer,
-    dislikes integer,
-    wilson_score double precision,
-    published timestamp with time zone,
-    description text,
-    language text,
-    author text,
-    ucid text,
-    allowed_regions text[],
-    is_family_friendly boolean,
-    genre text,
-    genre_url text,
-    license text,
-    sub_count_text text,
-    author_thumbnail text,
     CONSTRAINT videos_pkey PRIMARY KEY (id)
 );


@@ -27,9 +27,9 @@ describe "Helper" do
   describe "#produce_channel_search_url" do
     it "correctly produces token for searching a specific channel" do
-      produce_channel_search_url("UCXuqSBlHAE6Xw-yeJA0Tunw", "", 100).should eq("/browse_ajax?continuation=4qmFsgI-EhhVQ1h1cVNCbEhBRTZYdy15ZUpBMFR1bncaIEVnWnpaV0Z5WTJnd0FqZ0JZQUZxQUxnQkFIb0RNVEF3WgA%3D&gl=US&hl=en")
-      produce_channel_search_url("UCXuqSBlHAE6Xw-yeJA0Tunw", "По ожиशुपतिरपि子而時ஸ்றீனி", 0).should eq("/browse_ajax?continuation=4qmFsgJ8EhhVQ1h1cVNCbEhBRTZYdy15ZUpBMFR1bncaIEVnWnpaV0Z5WTJnd0FqZ0JZQUZxQUxnQkFIb0JNQT09Wj7Qn9C-INC-0LbQuOCktuClgeCkquCkpOCkv-CksOCkquCkv-WtkOiAjOaZguCuuOCvjeCuseCvgOCuqeCuvw%3D%3D&gl=US&hl=en")
+      produce_channel_search_url("UCXuqSBlHAE6Xw-yeJA0Tunw", "", 100).should eq("/browse_ajax?continuation=4qmFsgI2EhhVQ1h1cVNCbEhBRTZYdy15ZUpBMFR1bncaGEVnWnpaV0Z5WTJnNEFYb0RNVEF3dUFFQVoA&gl=US&hl=en")
+      produce_channel_search_url("UCXuqSBlHAE6Xw-yeJA0Tunw", "По ожиशुपतिरपि子而時ஸ்றீனி", 0).should eq("/browse_ajax?continuation=4qmFsgJ0EhhVQ1h1cVNCbEhBRTZYdy15ZUpBMFR1bncaGEVnWnpaV0Z5WTJnNEFYb0JNTGdCQUE9PVo-0J_QviDQvtC20LjgpLbgpYHgpKrgpKTgpL_gpLDgpKrgpL_lrZDogIzmmYLgrrjgr43grrHgr4Dgrqngrr8%3D&gl=US&hl=en")
     end
   end
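
Note: the continuation strings asserted above are URL-encoded Base64 over a nested protobuf message, which is why changing a single inner field (here, the search filter bytes) rewrites the entire token. A minimal sketch of inspecting one with the protodec shard this repository already depends on; the "field:wire-type" key notation matches what Protodec::Any.parse produces, but treat the exact decoded shape as an assumption:

require "base64"
require "protodec/utils"

token = "4qmFsgI2EhhVQ1h1cVNCbEhBRTZYdy15ZUpBMFR1bncaGEVnWnpaV0Z5WTJnNEFYb0RNVEF3dUFFQVoA"

# Protodec::Any.parse decodes raw protobuf bytes into a JSON-like value
# keyed by "<field number>:<wire type>" (the same notation extract_plid
# uses further down in this commit).
decoded = Protodec::Any.parse(IO::Memory.new(Base64.decode(token)))
puts decoded.to_pretty_json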


@@ -510,16 +510,16 @@ get "/watch" do |env|
     comment_html ||= ""
   end

-  fmt_stream = video.fmt_stream(decrypt_function)
-  adaptive_fmts = video.adaptive_fmts(decrypt_function)
+  fmt_stream = video.fmt_stream
+  adaptive_fmts = video.adaptive_fmts

   if params.local
-    fmt_stream.each { |fmt| fmt["url"] = URI.parse(fmt["url"]).full_path }
-    adaptive_fmts.each { |fmt| fmt["url"] = URI.parse(fmt["url"]).full_path }
+    fmt_stream.each { |fmt| fmt["url"] = JSON::Any.new(URI.parse(fmt["url"].as_s).full_path) }
+    adaptive_fmts.each { |fmt| fmt["url"] = JSON::Any.new(URI.parse(fmt["url"].as_s).full_path) }
   end

-  video_streams = video.video_streams(adaptive_fmts)
-  audio_streams = video.audio_streams(adaptive_fmts)
+  video_streams = video.video_streams
+  audio_streams = video.audio_streams

   # Older videos may not have audio sources available.
   # We redirect here so they're not unplayable
@@ -549,33 +549,23 @@ get "/watch" do |env|
     aspect_ratio = "16:9"

-  video.description_html = fill_links(video.description_html, "https", "www.youtube.com")
-  video.description_html = replace_links(video.description_html)
-
-  host_url = make_host_url(config, Kemal.config)
-
-  if video.player_response["streamingData"]?.try &.["hlsManifestUrl"]?
-    hlsvp = video.player_response["streamingData"]["hlsManifestUrl"].as_s
-    hlsvp = hlsvp.gsub("https://manifest.googlevideo.com", host_url)
-  end
-
   thumbnail = "/vi/#{video.id}/maxres.jpg"

   if params.raw
     if params.listen
-      url = audio_streams[0]["url"]
+      url = audio_streams[0]["url"].as_s

       audio_streams.each do |fmt|
-        if fmt["bitrate"] == params.quality.rchop("k")
-          url = fmt["url"]
+        if fmt["bitrate"].as_i == params.quality.rchop("k").to_i
+          url = fmt["url"].as_s
         end
       end
     else
-      url = fmt_stream[0]["url"]
+      url = fmt_stream[0]["url"].as_s

       fmt_stream.each do |fmt|
-        if fmt["label"].split(" - ")[0] == params.quality
-          url = fmt["url"]
+        if fmt["quality"].as_s == params.quality
+          url = fmt["url"].as_s
         end
       end
     end
@@ -583,24 +573,6 @@ get "/watch" do |env|
     next env.redirect url
   end

-  rvs = [] of Hash(String, String)
-  video.info["rvs"]?.try &.split(",").each do |rv|
-    rvs << HTTP::Params.parse(rv).to_h
-  end
-
-  rating = video.info["avg_rating"].to_f64
-
-  if video.views > 0
-    engagement = ((video.dislikes.to_f + video.likes.to_f)/video.views * 100)
-  else
-    engagement = 0
-  end
-
-  playability_status = video.player_response["playabilityStatus"]?
-  if playability_status && playability_status["status"] == "LIVE_STREAM_OFFLINE" && !video.premiere_timestamp
-    reason = playability_status["reason"]?.try &.as_s
-  end
-  reason ||= ""
-
   templated "watch"
 end
@@ -752,16 +724,16 @@ get "/embed/:id" do |env|
     notifications.delete(id)
   end

-  fmt_stream = video.fmt_stream(decrypt_function)
-  adaptive_fmts = video.adaptive_fmts(decrypt_function)
+  fmt_stream = video.fmt_stream
+  adaptive_fmts = video.adaptive_fmts

   if params.local
-    fmt_stream.each { |fmt| fmt["url"] = URI.parse(fmt["url"]).full_path }
-    adaptive_fmts.each { |fmt| fmt["url"] = URI.parse(fmt["url"]).full_path }
+    fmt_stream.each { |fmt| fmt["url"] = JSON::Any.new(URI.parse(fmt["url"].as_s).full_path) }
+    adaptive_fmts.each { |fmt| fmt["url"] = JSON::Any.new(URI.parse(fmt["url"].as_s).full_path) }
   end

-  video_streams = video.video_streams(adaptive_fmts)
-  audio_streams = video.audio_streams(adaptive_fmts)
+  video_streams = video.video_streams
+  audio_streams = video.audio_streams

   if audio_streams.empty? && !video.live_now
     if params.quality == "dash"
@@ -788,25 +760,13 @@ get "/embed/:id" do |env|
     aspect_ratio = nil

-  video.description_html = fill_links(video.description_html, "https", "www.youtube.com")
-  video.description_html = replace_links(video.description_html)
-
-  host_url = make_host_url(config, Kemal.config)
-
-  if video.player_response["streamingData"]?.try &.["hlsManifestUrl"]?
-    hlsvp = video.player_response["streamingData"]["hlsManifestUrl"].as_s
-    hlsvp = hlsvp.gsub("https://manifest.googlevideo.com", host_url)
-  end
-
   thumbnail = "/vi/#{video.id}/maxres.jpg"

   if params.raw
-    url = fmt_stream[0]["url"]
+    url = fmt_stream[0]["url"].as_s

     fmt_stream.each do |fmt|
-      if fmt["label"].split(" - ")[0] == params.quality
-        url = fmt["url"]
-      end
+      url = fmt["url"].as_s if fmt["quality"].as_s == params.quality
     end

     next env.redirect url
@@ -1469,7 +1429,6 @@ post "/login" do |env|
       traceback = IO::Memory.new

       # See https://github.com/ytdl-org/youtube-dl/blob/2019.04.07/youtube_dl/extractor/youtube.py#L82
-      # TODO: Convert to QUIC
       begin
         client = QUIC::Client.new(LOGIN_URL)
         headers = HTTP::Headers.new
@@ -2329,8 +2288,7 @@ get "/modify_notifications" do |env|
     end
     headers = cookies.add_request_headers(headers)

-    match = html.body.match(/'XSRF_TOKEN': "(?<session_token>[A-Za-z0-9\_\-\=]+)"/)
-    if match
+    if match = html.body.match(/'XSRF_TOKEN': "(?<session_token>[^"]+)"/)
       session_token = match["session_token"]
     else
       next env.redirect referer
@@ -3575,14 +3533,14 @@ get "/channel/:ucid" do |env|
         item.author
       end
     end

-    items = items.select { |item| item.is_a?(SearchPlaylist) }.map { |item| item.as(SearchPlaylist) }
+    items = items.select(&.is_a?(SearchPlaylist)).map(&.as(SearchPlaylist))
     items.each { |item| item.author = "" }
   else
     sort_options = {"newest", "oldest", "popular"}
     sort_by ||= "newest"

-    items, count = get_60_videos(channel.ucid, channel.author, page, channel.auto_generated, sort_by)
-    items.select! { |item| !item.paid }
+    count, items = get_60_videos(channel.ucid, channel.author, page, channel.auto_generated, sort_by)
+    items.reject! &.paid

     env.set "search", "channel:#{channel.ucid} "
   end
@@ -5125,7 +5083,7 @@ get "/api/manifest/dash/id/:id" do |env|
     next
   end

-  if dashmpd = video.player_response["streamingData"]?.try &.["dashManifestUrl"]?.try &.as_s
+  if dashmpd = video.dash_manifest_url
     manifest = YT_POOL.client &.get(URI.parse(dashmpd).full_path).body

     manifest = manifest.gsub(/<BaseURL>[^<]+<\/BaseURL>/) do |baseurl|
@@ -5142,16 +5100,16 @@ get "/api/manifest/dash/id/:id" do |env|
     next manifest
   end

-  adaptive_fmts = video.adaptive_fmts(decrypt_function)
+  adaptive_fmts = video.adaptive_fmts

   if local
     adaptive_fmts.each do |fmt|
-      fmt["url"] = URI.parse(fmt["url"]).full_path
+      fmt["url"] = JSON::Any.new(URI.parse(fmt["url"].as_s).full_path)
     end
   end

-  audio_streams = video.audio_streams(adaptive_fmts)
-  video_streams = video.video_streams(adaptive_fmts).sort_by { |stream| {stream["size"].split("x")[0].to_i, stream["fps"].to_i} }.reverse
+  audio_streams = video.audio_streams
+  video_streams = video.video_streams.sort_by { |stream| {stream["width"].as_i, stream["fps"].as_i} }.reverse

   XML.build(indent: "  ", encoding: "UTF-8") do |xml|
     xml.element("MPD", "xmlns": "urn:mpeg:dash:schema:mpd:2011",
@@ -5161,24 +5119,22 @@ get "/api/manifest/dash/id/:id" do |env|
     i = 0

     {"audio/mp4", "audio/webm"}.each do |mime_type|
-      mime_streams = audio_streams.select { |stream| stream["type"].starts_with? mime_type }
-      if mime_streams.empty?
-        next
-      end
+      mime_streams = audio_streams.select { |stream| stream["mimeType"].as_s.starts_with? mime_type }
+      next if mime_streams.empty?

       xml.element("AdaptationSet", id: i, mimeType: mime_type, startWithSAP: 1, subsegmentAlignment: true) do
         mime_streams.each do |fmt|
-          codecs = fmt["type"].split("codecs=")[1].strip('"')
-          bandwidth = fmt["bitrate"].to_i * 1000
-          itag = fmt["itag"]
-          url = fmt["url"]
+          codecs = fmt["mimeType"].as_s.split("codecs=")[1].strip('"')
+          bandwidth = fmt["bitrate"].as_i
+          itag = fmt["itag"].as_i
+          url = fmt["url"].as_s

           xml.element("Representation", id: fmt["itag"], codecs: codecs, bandwidth: bandwidth) do
             xml.element("AudioChannelConfiguration", schemeIdUri: "urn:mpeg:dash:23003:3:audio_channel_configuration:2011",
               value: "2")
             xml.element("BaseURL") { xml.text url }
-            xml.element("SegmentBase", indexRange: fmt["index"]) do
-              xml.element("Initialization", range: fmt["init"])
+            xml.element("SegmentBase", indexRange: "#{fmt["indexRange"]["start"]}-#{fmt["indexRange"]["end"]}") do
+              xml.element("Initialization", range: "#{fmt["initRange"]["start"]}-#{fmt["initRange"]["end"]}")
             end
           end
         end
@@ -5187,21 +5143,24 @@ get "/api/manifest/dash/id/:id" do |env|
       i += 1
     end

+    potential_heights = {4320, 2160, 1440, 1080, 720, 480, 360, 240, 144}
+
     {"video/mp4", "video/webm"}.each do |mime_type|
-      mime_streams = video_streams.select { |stream| stream["type"].starts_with? mime_type }
+      mime_streams = video_streams.select { |stream| stream["mimeType"].as_s.starts_with? mime_type }
       next if mime_streams.empty?

       heights = [] of Int32

       xml.element("AdaptationSet", id: i, mimeType: mime_type, startWithSAP: 1, subsegmentAlignment: true, scanType: "progressive") do
         mime_streams.each do |fmt|
-          codecs = fmt["type"].split("codecs=")[1].strip('"')
-          bandwidth = fmt["bitrate"]
-          itag = fmt["itag"]
-          url = fmt["url"]
-          width, height = fmt["size"].split("x").map { |i| i.to_i }
+          codecs = fmt["mimeType"].as_s.split("codecs=")[1].strip('"')
+          bandwidth = fmt["bitrate"].as_i
+          itag = fmt["itag"].as_i
+          url = fmt["url"].as_s
+          width = fmt["width"].as_i
+          height = fmt["height"].as_i

           # Resolutions reported by YouTube player (may not accurately reflect source)
-          height = [4320, 2160, 1440, 1080, 720, 480, 360, 240, 144].sort_by { |i| (height - i).abs }[0]
+          height = potential_heights.min_by { |i| (height - i).abs }

           next if unique_res && heights.includes? height
           heights << height
@@ -5209,8 +5168,8 @@ get "/api/manifest/dash/id/:id" do |env|
             startWithSAP: "1", maxPlayoutRate: "1",
             bandwidth: bandwidth, frameRate: fmt["fps"]) do
             xml.element("BaseURL") { xml.text url }
-            xml.element("SegmentBase", indexRange: fmt["index"]) do
-              xml.element("Initialization", range: fmt["init"])
+            xml.element("SegmentBase", indexRange: "#{fmt["indexRange"]["start"]}-#{fmt["indexRange"]["end"]}") do
+              xml.element("Initialization", range: "#{fmt["initRange"]["start"]}-#{fmt["initRange"]["end"]}")
             end
           end
         end
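
(Aside on the snapping above: heights reported by the player response don't always land on the standard resolution ladder, so the manifest snaps each one to the nearest entry in potential_heights. A self-contained illustration:)

potential_heights = {4320, 2160, 1440, 1080, 720, 480, 360, 240, 144}
height = 1035 # e.g. a non-standard source height
height = potential_heights.min_by { |i| (height - i).abs }
puts height # => 1080
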
@@ -5224,10 +5183,10 @@ get "/api/manifest/dash/id/:id" do |env|
 end

 get "/api/manifest/hls_variant/*" do |env|
-  manifest = YT_POOL.client &.get(env.request.path)
+  response = YT_POOL.client &.get(env.request.path)

-  if manifest.status_code != 200
-    env.response.status_code = manifest.status_code
+  if response.status_code != 200
+    env.response.status_code = response.status_code
     next
   end
@@ -5247,10 +5206,10 @@ get "/api/manifest/hls_variant/*" do |env|
 end

 get "/api/manifest/hls_playlist/*" do |env|
-  manifest = YT_POOL.client &.get(env.request.path)
+  response = YT_POOL.client &.get(env.request.path)

-  if manifest.status_code != 200
-    env.response.status_code = manifest.status_code
+  if response.status_code != 200
+    env.response.status_code = response.status_code
     next
   end
@@ -5320,7 +5279,7 @@ get "/latest_version" do |env|
   end

   id ||= env.params.query["id"]?
-  itag ||= env.params.query["itag"]?
+  itag ||= env.params.query["itag"]?.try &.to_i

   region = env.params.query["region"]?
@@ -5335,26 +5294,16 @@ get "/latest_version" do |env|
   video = get_video(id, PG_DB, region: region)

-  fmt_stream = video.fmt_stream(decrypt_function)
-  adaptive_fmts = video.adaptive_fmts(decrypt_function)
-
-  urls = (fmt_stream + adaptive_fmts).select { |fmt| fmt["itag"] == itag }
-  if urls.empty?
+  fmt = video.fmt_stream.find(nil) { |f| f["itag"].as_i == itag } || video.adaptive_fmts.find(nil) { |f| f["itag"].as_i == itag }
+  url = fmt.try &.["url"]?.try &.as_s
+
+  if !url
     env.response.status_code = 404
     next
-  elsif urls.size > 1
-    env.response.status_code = 409
-    next
   end

-  url = urls[0]["url"]
-
-  if local
-    url = URI.parse(url).full_path.not_nil!
-  end
-
-  if title
-    url += "&title=#{title}"
-  end
+  url = URI.parse(url).full_path.not_nil! if local
+  url = "#{url}&title=#{title}" if title

   env.redirect url
 end
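
A note for readers following the /latest_version rewrite above: formats now come straight from the parsed player response, so every lookup goes through JSON::Any accessors (as_i, as_s) and nilable subscripts instead of the old string-keyed HTTP params. A self-contained sketch of the same pattern over made-up sample data:

require "json"

# Sample entries shaped like streamingData formats (values are fabricated).
formats = JSON.parse(%([
  {"itag": 22, "url": "https://example.invalid/videoplayback?a=1", "qualityLabel": "720p"},
  {"itag": 18, "url": "https://example.invalid/videoplayback?b=2", "qualityLabel": "360p"}
])).as_a

itag = 18
# find(nil) returns nil instead of raising when nothing matches.
fmt = formats.find(nil) { |f| f["itag"].as_i == itag }
url = fmt.try &.["url"]?.try &.as_s
puts url # => "https://example.invalid/videoplayback?b=2"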


@@ -232,9 +232,9 @@ def fetch_channel(ucid, db, pull_all_videos = true, locale = nil)
     nodeset = document.xpath_nodes(%q(//li[contains(@class, "feed-item-container")]))

     if auto_generated
-      videos = extract_videos(nodeset)
+      videos = extract_videos_html(nodeset)
     else
-      videos = extract_videos(nodeset, ucid, author)
+      videos = extract_videos_html(nodeset, ucid, author)
     end
   end
@@ -317,9 +317,9 @@ def fetch_channel(ucid, db, pull_all_videos = true, locale = nil)
       nodeset = nodeset.not_nil!

       if auto_generated
-        videos = extract_videos(nodeset)
+        videos = extract_videos_html(nodeset)
       else
-        videos = extract_videos(nodeset, ucid, author)
+        videos = extract_videos_html(nodeset, ucid, author)
       end

       count = nodeset.size
@@ -429,7 +429,7 @@ def fetch_channel_playlists(ucid, author, auto_generated, continuation, sort_by)
   if auto_generated
     items = extract_shelf_items(nodeset, ucid, author)
   else
-    items = extract_items(nodeset, ucid, author)
+    items = extract_items_html(nodeset, ucid, author)
   end

   return items, continuation
@@ -584,16 +584,8 @@ def fetch_channel_community(ucid, continuation, locale, format, thin_mode)
   headers = HTTP::Headers.new
   headers["cookie"] = response.cookies.add_request_headers(headers)["cookie"]
-  headers["content-type"] = "application/x-www-form-urlencoded"
-
-  headers["x-client-data"] = "CIi2yQEIpbbJAQipncoBCNedygEIqKPKAQ=="
-  headers["x-spf-previous"] = ""
-  headers["x-spf-referer"] = ""
-
-  headers["x-youtube-client-name"] = "1"
-  headers["x-youtube-client-version"] = "2.20180719"

-  session_token = response.body.match(/"XSRF_TOKEN":"(?<session_token>[A-Za-z0-9\_\-\=]+)"/).try &.["session_token"]? || ""
+  session_token = response.body.match(/"XSRF_TOKEN":"(?<session_token>[^"]+)"/).try &.["session_token"]? || ""
   post_req = {
     session_token: session_token,
   }
@@ -633,13 +625,7 @@ def fetch_channel_community(ucid, continuation, locale, format, thin_mode)
       next if !post

-      if !post["contentText"]?
-        content_html = ""
-      else
-        content_html = post["contentText"]["simpleText"]?.try &.as_s.rchop('\ufeff').try { |b| HTML.escape(b) }.to_s ||
-                       post["contentText"]["runs"]?.try &.as_a.try { |r| content_to_comment_html(r).try &.to_s } || ""
-      end
+      content_html = post["contentText"]?.try { |t| parse_content(t) } || ""

       author = post["authorText"]?.try &.["simpleText"]? || ""

       json.object do
@@ -960,7 +946,7 @@ def get_60_videos(ucid, author, page, auto_generated, sort_by = "newest")
   2.times do |i|
     url = produce_channel_videos_url(ucid, page * 2 + (i - 1), auto_generated: auto_generated, sort_by: sort_by)
-    response = YT_POOL.client &.get(url, headers)
+    response = YT_POOL.client &.get(url)
     initial_data = JSON.parse(response.body).as_a.find &.["response"]?
     break if !initial_data
     videos.concat extract_videos(initial_data.as_h)
@@ -980,7 +966,7 @@ def get_latest_videos(ucid)
     document = XML.parse_html(json["content_html"].as_s)
     nodeset = document.xpath_nodes(%q(//li[contains(@class, "feed-item-container")]))

-    videos = extract_videos(nodeset, ucid)
+    videos = extract_videos_html(nodeset, ucid)
   end

   return videos


@@ -59,7 +59,7 @@ end
 def fetch_youtube_comments(id, db, cursor, format, locale, thin_mode, region, sort_by = "top")
   video = get_video(id, db, region: region)

-  session_token = video.info["session_token"]?
+  session_token = video.session_token

   case cursor
   when nil, ""
@@ -85,17 +85,9 @@ def fetch_youtube_comments(id, db, cursor, format, locale, thin_mode, region, so
     session_token: session_token,
   }

-  headers = HTTP::Headers.new
-
-  headers["content-type"] = "application/x-www-form-urlencoded"
-  headers["cookie"] = video.info["cookie"]
-
-  headers["x-client-data"] = "CIi2yQEIpbbJAQipncoBCNedygEIqKPKAQ=="
-  headers["x-spf-previous"] = "https://www.youtube.com/watch?v=#{id}&gl=US&hl=en&disable_polymer=1&has_verified=1&bpctr=9999999999"
-  headers["x-spf-referer"] = "https://www.youtube.com/watch?v=#{id}&gl=US&hl=en&disable_polymer=1&has_verified=1&bpctr=9999999999"
-
-  headers["x-youtube-client-name"] = "1"
-  headers["x-youtube-client-version"] = "2.20180719"
+  headers = HTTP::Headers{
+    "cookie" => video.cookie,
+  }

   response = YT_POOL.client(region, &.post("/comment_service_ajax?action_get_comments=1&hl=en&gl=US", headers, form: post_req))
   response = JSON.parse(response.body)
@@ -150,8 +142,7 @@ def fetch_youtube_comments(id, db, cursor, format, locale, thin_mode, region, so
     node_comment = node["commentRenderer"]
   end

-  content_html = node_comment["contentText"]["simpleText"]?.try &.as_s.rchop('\ufeff').try { |b| HTML.escape(b) }.to_s ||
-                 node_comment["contentText"]["runs"]?.try &.as_a.try { |r| content_to_comment_html(r).try &.to_s } || ""
+  content_html = node_comment["contentText"]?.try { |t| parse_content(t) } || ""
   author = node_comment["authorText"]?.try &.["simpleText"]? || ""

   json.field "author", author
@@ -523,6 +514,11 @@ def fill_links(html, scheme, host)
   return html.to_xml(options: XML::SaveOptions::NO_DECL)
 end

+def parse_content(content : JSON::Any) : String
+  content["simpleText"]?.try &.as_s.rchop('\ufeff').try { |b| HTML.escape(b) }.to_s ||
+    content["runs"]?.try &.as_a.try { |r| content_to_comment_html(r).try &.to_s } || ""
+end
+
 def content_to_comment_html(content)
   comment_html = content.map do |run|
     text = HTML.escape(run["text"].as_s)
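
The new parse_content helper above collapses the two shapes polymer uses for text, a "simpleText" string or a "runs" array, into a single escaped HTML string. Roughly (outputs shown are approximate; the runs case goes through content_to_comment_html):

simple = JSON.parse(%({"simpleText": "Plain <text>"}))
runs   = JSON.parse(%({"runs": [{"text": "Multi"}, {"text": "-part"}]}))

parse_content(simple) # => "Plain &lt;text&gt;"
parse_content(runs)   # => "Multi-part" (roughly)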


@@ -313,13 +313,149 @@ def html_to_content(description_html : String)
   return description
 end

-def extract_videos(nodeset, ucid = nil, author_name = nil)
-  videos = extract_items(nodeset, ucid, author_name)
-  videos.select { |item| item.is_a?(SearchVideo) }.map { |video| video.as(SearchVideo) }
+def extract_videos(initial_data : Hash(String, JSON::Any))
+  extract_items(initial_data).select(&.is_a?(SearchVideo)).map(&.as(SearchVideo))
 end

-def extract_items(nodeset, ucid = nil, author_name = nil)
-  # TODO: Make this a 'common', so it makes more sense to be used here
+def extract_items(initial_data : Hash(String, JSON::Any))
+  items = [] of SearchItem
+
+  initial_data.try { |t|
+    t["contents"]? || t["response"]?
+  }.try { |t|
+    t["twoColumnBrowseResultsRenderer"]?.try &.["tabs"].as_a[0]?.try &.["tabRenderer"]["content"] ||
+      t["twoColumnSearchResultsRenderer"]?.try &.["primaryContents"] ||
+      t["continuationContents"]?
+  }.try { |t| t["sectionListRenderer"]? || t["sectionListContinuation"]? }
+    .try &.["contents"]
+      .as_a.each { |c|
+      c.try &.["itemSectionRenderer"]["contents"].as_a
+        .try { |t| t[0]?.try &.["shelfRenderer"]?.try &.["content"]["expandedShelfContentsRenderer"]?.try &.["items"].as_a || t }
+        .each { |item|
+          if i = item["videoRenderer"]?
+            video_id = i["videoId"].as_s
+            title = i["title"].try { |t| t["simpleText"]?.try &.as_s || t["runs"]?.try &.as_a.map(&.["text"].as_s).join("") } || ""
+
+            author_info = i["ownerText"]?.try &.["runs"].as_a[0]?
+            author = author_info.try &.["text"].as_s || ""
+            author_id = author_info.try &.["navigationEndpoint"]?.try &.["browseEndpoint"]["browseId"].as_s || ""
+
+            published = i["publishedTimeText"]?.try &.["simpleText"]?.try { |t| decode_date(t.as_s) } || Time.local
+            view_count = i["viewCountText"]?.try &.["simpleText"]?.try &.as_s.gsub(/\D+/, "").to_i64? || 0_i64
+            description_html = i["descriptionSnippet"]?.try { |t| parse_content(t) } || ""
+            length_seconds = i["lengthText"]?.try &.["simpleText"]?.try &.as_s.try { |t| decode_length_seconds(t) } || 0
+
+            live_now = false
+            paid = false
+            premium = false
+
+            premiere_timestamp = i["upcomingEventData"]?.try &.["startTime"]?.try { |t| Time.unix(t.as_s.to_i64) }
+
+            i["badges"]?.try &.as_a.each do |badge|
+              b = badge["metadataBadgeRenderer"]
+              case b["label"].as_s
+              when "LIVE NOW"
+                live_now = true
+              when "New", "4K", "CC"
+                # TODO
+              when "Premium"
+                paid = true
+
+                # TODO: Potentially available as i["topStandaloneBadge"]["metadataBadgeRenderer"]
+                premium = true
+              else nil # Ignore
+              end
+            end
+
+            items << SearchVideo.new(
+              title: title,
+              id: video_id,
+              author: author,
+              ucid: author_id,
+              published: published,
+              views: view_count,
+              description_html: description_html,
+              length_seconds: length_seconds,
+              live_now: live_now,
+              paid: paid,
+              premium: premium,
+              premiere_timestamp: premiere_timestamp
+            )
+          elsif i = item["channelRenderer"]?
+            author = i["title"]["simpleText"]?.try &.as_s || ""
+            author_id = i["channelId"]?.try &.as_s || ""
+
+            author_thumbnail = i["thumbnail"]["thumbnails"]?.try &.as_a[0]?.try { |u| "https:#{u["url"]}" } || ""
+            subscriber_count = i["subscriberCountText"]?.try &.["simpleText"]?.try &.as_s.try { |s| short_text_to_number(s.split(" ")[0]) } || 0
+
+            auto_generated = false
+            auto_generated = true if !i["videoCountText"]?
+            video_count = i["videoCountText"]?.try &.["runs"].as_a[0]?.try &.["text"].as_s.gsub(/\D/, "").to_i || 0
+            description_html = i["descriptionSnippet"]?.try { |t| parse_content(t) } || ""
+
+            items << SearchChannel.new(
+              author: author,
+              ucid: author_id,
+              author_thumbnail: author_thumbnail,
+              subscriber_count: subscriber_count,
+              video_count: video_count,
+              description_html: description_html,
+              auto_generated: auto_generated,
+            )
+          elsif i = item["playlistRenderer"]?
+            title = i["title"]["simpleText"]?.try &.as_s || ""
+            plid = i["playlistId"]?.try &.as_s || ""
+
+            video_count = i["videoCount"]?.try &.as_s.to_i || 0
+            playlist_thumbnail = i["thumbnails"].as_a[0]?.try &.["thumbnails"]?.try &.as_a[0]?.try &.["url"].as_s || ""
+
+            author_info = i["shortBylineText"]["runs"].as_a[0]?
+            author = author_info.try &.["text"].as_s || ""
+            author_id = author_info.try &.["navigationEndpoint"]?.try &.["browseEndpoint"]["browseId"].as_s || ""
+
+            videos = i["videos"]?.try &.as_a.map do |v|
+              v = v["childVideoRenderer"]
+              v_title = v["title"]["simpleText"]?.try &.as_s || ""
+              v_id = v["videoId"]?.try &.as_s || ""
+              v_length_seconds = v["lengthText"]?.try &.["simpleText"]?.try { |t| decode_length_seconds(t.as_s) } || 0
+              SearchPlaylistVideo.new(
+                title: v_title,
+                id: v_id,
+                length_seconds: v_length_seconds
+              )
+            end || [] of SearchPlaylistVideo
+
+            # TODO: i["publishedTimeText"]?
+
+            items << SearchPlaylist.new(
+              title: title,
+              id: plid,
+              author: author,
+              ucid: author_id,
+              video_count: video_count,
+              videos: videos,
+              thumbnail: playlist_thumbnail
+            )
+          elsif i = item["radioRenderer"]? # Mix
+            # TODO
+          elsif i = item["showRenderer"]? # Show
+            # TODO
+          elsif i = item["shelfRenderer"]?
+          elsif i = item["horizontalCardListRenderer"]?
+          elsif i = item["searchPyvRenderer"]? # Ad
+          end
+        }
+    }
+
+  items
+end
+
+def extract_videos_html(nodeset, ucid = nil, author_name = nil)
+  extract_items_html(nodeset, ucid, author_name).select(&.is_a?(SearchVideo)).map(&.as(SearchVideo))
+end
+
+def extract_items_html(nodeset, ucid = nil, author_name = nil)
+  # TODO: Make this a 'CommonItem', so it makes more sense to be used here
   items = [] of SearchItem

   nodeset.each do |node|
@@ -456,7 +592,7 @@ def extract_items(nodeset, ucid = nil, author_name = nil)
       paid = true
     end

-    premiere_timestamp = node.xpath_node(%q(.//ul[@class="yt-lockup-meta-info"]/li/span[@class="localized-date"])).try &.["data-timestamp"]?.try &.to_i64
+    premiere_timestamp = node.xpath_node(%q(.//ul[@class="yt-lockup-meta-info"]/li/span[@class="localized-date"])).try &.["data-timestamp"]?.try &.to_i64?
     if premiere_timestamp
       premiere_timestamp = Time.unix(premiere_timestamp)
     end
@@ -683,12 +819,12 @@ def check_table(db, logger, table_name, struct_type = nil)
   return if column_array.size <= struct_array.size

-  # column_array.each do |column|
-  #   if !struct_array.includes? column
-  #     logger.puts("ALTER TABLE #{table_name} DROP COLUMN #{column} CASCADE")
-  #     db.exec("ALTER TABLE #{table_name} DROP COLUMN #{column} CASCADE")
-  #   end
-  # end
+  column_array.each do |column|
+    if !struct_array.includes? column
+      logger.puts("ALTER TABLE #{table_name} DROP COLUMN #{column} CASCADE")
+      db.exec("ALTER TABLE #{table_name} DROP COLUMN #{column} CASCADE")
+    end
+  end
 end
class PG::ResultSet class PG::ResultSet
@@ -864,12 +1000,12 @@ def create_notification_stream(env, topics, connection_channel)
   end
 end

-def extract_initial_data(body)
-  initial_data = body.match(/window\["ytInitialData"\] = (?<info>.*?);\n/).try &.["info"] || "{}"
+def extract_initial_data(body) : Hash(String, JSON::Any)
+  initial_data = body.match(/window\["ytInitialData"\]\s*=\s*(?<info>.*?);+\n/).try &.["info"] || "{}"
   if initial_data.starts_with?("JSON.parse(\"")
-    return JSON.parse(JSON.parse(%({"initial_data":"#{initial_data[12..-3]}"}))["initial_data"].as_s)
+    return JSON.parse(JSON.parse(%({"initial_data":"#{initial_data[12..-3]}"}))["initial_data"].as_s).as_h
   else
-    return JSON.parse(initial_data)
+    return JSON.parse(initial_data).as_h
   end
 end
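
Taken together, the more tolerant regex and the .as_h returns make extract_initial_data the entry point for polymer scraping: pull the ytInitialData blob out of a page body, then hand it to extract_items. A minimal sketch of the flow this commit itself uses on the search path:

body = YT_POOL.client &.get("/results?q=test&hl=en").body
initial_data = extract_initial_data(body) # Hash(String, JSON::Any)
items = extract_items(initial_data)       # Array(SearchItem)

items.each do |item|
  puts item.title if item.is_a?(SearchVideo)
end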


@@ -201,7 +201,7 @@ end
 def bypass_captcha(captcha_key, logger)
   loop do
     begin
-      {"/watch?v=CvFH_6DNRCY&gl=US&hl=en&disable_polymer=1&has_verified=1&bpctr=9999999999", produce_channel_videos_url(ucid: "UCXuqSBlHAE6Xw-yeJA0Tunw")}.each do |path|
+      {"/watch?v=CvFH_6DNRCY&gl=US&hl=en&has_verified=1&bpctr=9999999999", produce_channel_videos_url(ucid: "UCXuqSBlHAE6Xw-yeJA0Tunw")}.each do |path|
        response = YT_POOL.client &.get(path)

        if response.body.includes?("To continue with your YouTube experience, please fill out the form below.")
          html = XML.parse_html(response.body)


@@ -1,8 +1,8 @@
 alias SigProc = Proc(Array(String), Int32, Array(String))

 def fetch_decrypt_function(id = "CvFH_6DNRCY")
-  document = YT_POOL.client &.get("/watch?v=#{id}&gl=US&hl=en&disable_polymer=1").body
-  url = document.match(/src="(?<url>.*player_ias[^\/]+\/en_US\/base.js)"/).not_nil!["url"]
+  document = YT_POOL.client &.get("/watch?v=#{id}&gl=US&hl=en").body
+  url = document.match(/src="(?<url>\/yts\/jsbin\/player_ias-[^\/]+\/en_US\/base.js)"/).not_nil!["url"]
   player = YT_POOL.client &.get(url).body

   function_name = player.match(/^(?<name>[^=]+)=function\(\w\){\w=\w\.split\(""\);[^\. ]+\.[^( ]+/m).not_nil!["name"]


@@ -8,7 +8,7 @@ def add_yt_headers(request)
   request.headers["accept-language"] ||= "en-us,en;q=0.5"
   return if request.resource.starts_with? "/sorry/index"
   request.headers["x-youtube-client-name"] ||= "1"
-  request.headers["x-youtube-client-version"] ||= "1.20180719"
+  request.headers["x-youtube-client-version"] ||= "2.20200609"
   if !CONFIG.cookies.empty?
     request.headers["cookie"] = "#{(CONFIG.cookies.map { |c| "#{c.name}=#{c.value}" }).join("; ")}; #{request.headers["cookie"]?}"
   end


@@ -20,7 +20,6 @@ end
 def fetch_mix(rdid, video_id, cookies = nil, locale = nil)
   headers = HTTP::Headers.new
-  headers["User-Agent"] = "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/69.0.3497.100 Safari/537.36"

   if cookies
     headers = cookies.add_request_headers(headers)


@@ -96,6 +96,10 @@ struct SearchVideo
     end
   end

+  def is_upcoming
+    premiere_timestamp ? true : false
+  end
+
   db_mapping({
     title: String,
     id: String,
@@ -227,61 +231,35 @@ end
 alias SearchItem = SearchVideo | SearchChannel | SearchPlaylist

 def channel_search(query, page, channel)
-  response = YT_POOL.client &.get("/channel/#{channel}?disable_polymer=1&hl=en&gl=US")
-  document = XML.parse_html(response.body)
-  canonical = document.xpath_node(%q(//link[@rel="canonical"]))
-
-  if !canonical
-    response = YT_POOL.client &.get("/c/#{channel}?disable_polymer=1&hl=en&gl=US")
-    document = XML.parse_html(response.body)
-    canonical = document.xpath_node(%q(//link[@rel="canonical"]))
-  end
-
-  if !canonical
-    response = YT_POOL.client &.get("/user/#{channel}?disable_polymer=1&hl=en&gl=US")
-    document = XML.parse_html(response.body)
-    canonical = document.xpath_node(%q(//link[@rel="canonical"]))
-  end
-
-  if !canonical
-    return 0, [] of SearchItem
-  end
-
-  ucid = canonical["href"].split("/")[-1]
+  response = YT_POOL.client &.get("/channel/#{channel}?hl=en&gl=US")
+  response = YT_POOL.client &.get("/user/#{channel}?hl=en&gl=US") if response.headers["location"]?
+  response = YT_POOL.client &.get("/c/#{channel}?hl=en&gl=US") if response.headers["location"]?
+
+  ucid = response.body.match(/\\"channelId\\":\\"(?<ucid>[^\\]+)\\"/).try &.["ucid"]?
+
+  return 0, [] of SearchItem if !ucid

   url = produce_channel_search_url(ucid, query, page)
   response = YT_POOL.client &.get(url)
-  json = JSON.parse(response.body)
-
-  if json["content_html"]? && !json["content_html"].as_s.empty?
-    document = XML.parse_html(json["content_html"].as_s)
-    nodeset = document.xpath_nodes(%q(//li[contains(@class, "feed-item-container")]))
-
-    count = nodeset.size
-    items = extract_items(nodeset)
-  else
-    count = 0
-    items = [] of SearchItem
-  end
-
-  return count, items
+  initial_data = JSON.parse(response.body).as_a.find &.["response"]?
+  return 0, [] of SearchItem if !initial_data
+
+  items = extract_items(initial_data.as_h)
+
+  return items.size, items
 end

 def search(query, page = 1, search_params = produce_search_params(content_type: "all"), region = nil)
-  if query.empty?
-    return {0, [] of SearchItem}
-  end
+  return 0, [] of SearchItem if query.empty?

-  html = YT_POOL.client(region, &.get("/results?q=#{URI.encode_www_form(query)}&page=#{page}&sp=#{search_params}&hl=en&disable_polymer=1").body)
-  if html.empty?
-    return {0, [] of SearchItem}
-  end
+  body = YT_POOL.client(region, &.get("/results?q=#{URI.encode_www_form(query)}&page=#{page}&sp=#{search_params}&hl=en").body)
+  return 0, [] of SearchItem if body.empty?

-  html = XML.parse_html(html)
-  nodeset = html.xpath_nodes(%q(//ol[@class="item-section"]/li))
-  items = extract_items(nodeset)
-
-  return {nodeset.size, items}
+  initial_data = extract_initial_data(body)
+  items = extract_items(initial_data)

+  # initial_data["estimatedResults"]?.try &.as_s.to_i64
+  return items.size, items
 end

 def produce_search_params(sort : String = "relevance", date : String = "", content_type : String = "",
@@ -387,12 +365,9 @@ def produce_channel_search_url(ucid, query, page)
       "2:string" => ucid,
       "3:base64" => {
         "2:string" => "search",
-        "6:varint" => 2_i64,
         "7:varint" => 1_i64,
-        "12:varint" => 1_i64,
-        "13:string" => "",
-        "23:varint" => 0_i64,
         "15:string" => "#{page}",
+        "23:varint" => 0_i64,
       },
       "11:string" => query,
     },
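
The hash above is protodec's JSON notation for a protobuf message ("field:type" => value); the continuation token is that message serialized and Base64-encoded, with "base64"-tagged subtrees encoded before being embedded. A sketch of the encode direction, assuming the cast_json/from_json helpers from the same protodec shard (their exact names are an assumption here, unlike Protodec::Any.parse, which appears verbatim in this commit):

require "base64"
require "uri"
require "protodec/utils"

object = {
  "80226972:embedded" => {
    "2:string" => "UCXuqSBlHAE6Xw-yeJA0Tunw",
    "3:base64" => {
      "2:string"  => "search",
      "7:varint"  => 1_i64,
      "15:string" => "1",
      "23:varint" => 0_i64,
    },
    "11:string" => "test",
  },
}

# cast_json normalizes the hash, from_json serializes it to protobuf bytes.
continuation = object.try { |i| Protodec::Any.cast_json(i) }
  .try { |i| Protodec::Any.from_json(i) }
  .try { |i| Base64.urlsafe_encode(i) }
  .try { |i| URI.encode_www_form(i) }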


@@ -1,7 +1,4 @@
 def fetch_trending(trending_type, region, locale)
-  headers = HTTP::Headers.new
-  headers["User-Agent"] = "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/68.0.3440.106 Safari/537.36"
-
   region ||= "US"
   region = region.upcase
@@ -11,7 +8,7 @@ def fetch_trending(trending_type, region, locale)
   if trending_type && trending_type != "Default"
     trending_type = trending_type.downcase.capitalize

-    response = YT_POOL.client &.get("/feed/trending?gl=#{region}&hl=en", headers).body
+    response = YT_POOL.client &.get("/feed/trending?gl=#{region}&hl=en").body

     initial_data = extract_initial_data(response)
@@ -21,31 +18,28 @@ def fetch_trending(trending_type, region, locale)
     if url
-      url["channelListSubMenuAvatarRenderer"]["navigationEndpoint"]["commandMetadata"]["webCommandMetadata"]["url"]
       url = url["channelListSubMenuAvatarRenderer"]["navigationEndpoint"]["commandMetadata"]["webCommandMetadata"]["url"].as_s
-      url += "&disable_polymer=1&gl=#{region}&hl=en"
+      url = "#{url}&gl=#{region}&hl=en"
       trending = YT_POOL.client &.get(url).body
       plid = extract_plid(url)
     else
-      trending = YT_POOL.client &.get("/feed/trending?gl=#{region}&hl=en&disable_polymer=1").body
+      trending = YT_POOL.client &.get("/feed/trending?gl=#{region}&hl=en").body
     end
   else
-    trending = YT_POOL.client &.get("/feed/trending?gl=#{region}&hl=en&disable_polymer=1").body
+    trending = YT_POOL.client &.get("/feed/trending?gl=#{region}&hl=en").body
   end

-  trending = XML.parse_html(trending)
-  nodeset = trending.xpath_nodes(%q(//ul/li[@class="expanded-shelf-content-item-wrapper"]))
-  trending = extract_videos(nodeset)
+  initial_data = extract_initial_data(trending)
+  trending = extract_videos(initial_data)

   return {trending, plid}
 end

 def extract_plid(url)
-  plid = URI.parse(url)
-    .try { |i| HTTP::Params.parse(i.query.not_nil!)["bp"] }
+  return url.try { |i| URI.parse(i).query }
+    .try { |i| HTTP::Params.parse(i)["bp"] }
     .try { |i| URI.decode_www_form(i) }
     .try { |i| Base64.decode(i) }
     .try { |i| IO::Memory.new(i) }
     .try { |i| Protodec::Any.parse(i) }
-    .try { |i| i["44:0:embedded"]["2:1:string"].as_s }
-
-  return plid
+    .try &.["44:0:embedded"]?.try &.["2:1:string"]?.try &.as_s
 end


@@ -267,7 +267,7 @@ def subscribe_ajax(channel_id, action, env_headers)
   end
   headers = cookies.add_request_headers(headers)

-  if match = html.body.match(/'XSRF_TOKEN': "(?<session_token>[A-Za-z0-9\_\-\=]+)"/)
+  if match = html.body.match(/'XSRF_TOKEN': "(?<session_token>[^"]+)"/)
     session_token = match["session_token"]

     headers["content-type"] = "application/x-www-form-urlencoded"
@@ -300,7 +300,7 @@ end
 #   end
 #   headers = cookies.add_request_headers(headers)
 #
-#   if match = html.body.match(/'XSRF_TOKEN': "(?<session_token>[A-Za-z0-9\_\-\=]+)"/)
+#   if match = html.body.match(/'XSRF_TOKEN': "(?<session_token>[^"]+)"/)
 #     session_token = match["session_token"]
 #
 #     headers["content-type"] = "application/x-www-form-urlencoded"

File diff suppressed because it is too large.


@@ -85,7 +85,7 @@
           </p>
           <h5 class="pure-g">
-            <% if item.responds_to?(:premiere_timestamp) && item.premiere_timestamp && item.premiere_timestamp.not_nil! > Time.utc %>
+            <% if item.responds_to?(:premiere_timestamp) && item.premiere_timestamp.try &.> Time.utc %>
               <div class="pure-u-2-3"><%= translate(locale, "Premieres in `x`", recode_date((item.premiere_timestamp.as(Time) - Time.utc).ago, locale)) %></div>
             <% elsif Time.utc - item.published > 1.minute %>
               <div class="pure-u-2-3"><%= translate(locale, "Shared `x` ago", recode_date(item.published, locale)) %></div>
@@ -144,7 +144,7 @@
           </p>
           <h5 class="pure-g">
-            <% if item.responds_to?(:premiere_timestamp) && item.premiere_timestamp && item.premiere_timestamp.not_nil! > Time.utc %>
+            <% if item.responds_to?(:premiere_timestamp) && item.premiere_timestamp.try &.> Time.utc %>
              <div class="pure-u-2-3"><%= translate(locale, "Premieres in `x`", recode_date((item.premiere_timestamp.as(Time) - Time.utc).ago, locale)) %></div>
            <% elsif Time.utc - item.published > 1.minute %>
              <div class="pure-u-2-3"><%= translate(locale, "Shared `x` ago", recode_date(item.published, locale)) %></div>


@@ -3,23 +3,23 @@
     <% if params.autoplay %>autoplay<% end %>
     <% if params.video_loop %>loop<% end %>
     <% if params.controls %>controls<% end %>>
-    <% if hlsvp && !CONFIG.disabled?("livestreams") %>
-        <source src="<%= hlsvp %>?local=true" type="application/x-mpegURL" label="livestream">
+    <% if (hlsvp = video.hls_manifest_url) && !CONFIG.disabled?("livestreams") %>
+        <source src="<%= URI.parse(hlsvp).full_path %>?local=true" type="application/x-mpegURL" label="livestream">
     <% else %>
         <% if params.listen %>
             <% audio_streams.each_with_index do |fmt, i| %>
-                <source src="/latest_version?id=<%= video.id %>&itag=<%= fmt["itag"] %><% if params.local %>&local=true<% end %>" type='<%= fmt["type"] %>' label="<%= fmt["bitrate"] %>k" selected="<%= i == 0 ? true : false %>">
+                <source src="/latest_version?id=<%= video.id %>&itag=<%= fmt["itag"] %><% if params.local %>&local=true<% end %>" type='<%= fmt["mimeType"] %>' label="<%= fmt["bitrate"] %>k" selected="<%= i == 0 ? true : false %>">
             <% end %>
         <% else %>
            <% if params.quality == "dash" %>
-                <source src="/api/manifest/dash/id/<%= video.id %>?local=true" type='application/dash+xml' label="dash">
+                <source src="/api/manifest/dash/id/<%= video.id %>?local=true&unique_res=1" type='application/dash+xml' label="dash">
            <% end %>
            <% fmt_stream.each_with_index do |fmt, i| %>
                <% if params.quality %>
-                    <source src="/latest_version?id=<%= video.id %>&itag=<%= fmt["itag"] %><% if params.local %>&local=true<% end %>" type='<%= fmt["type"] %>' label="<%= fmt["label"] %>" selected="<%= params.quality == fmt["label"].split(" - ")[0] %>">
+                    <source src="/latest_version?id=<%= video.id %>&itag=<%= fmt["itag"] %><% if params.local %>&local=true<% end %>" type='<%= fmt["mimeType"] %>' label="<%= fmt["quality"] %>" selected="<%= params.quality == fmt["quality"] %>">
                <% else %>
-                    <source src="/latest_version?id=<%= video.id %>&itag=<%= fmt["itag"] %><% if params.local %>&local=true<% end %>" type='<%= fmt["type"] %>' label="<%= fmt["label"] %>" selected="<%= i == 0 ? true : false %>">
+                    <source src="/latest_version?id=<%= video.id %>&itag=<%= fmt["itag"] %><% if params.local %>&local=true<% end %>" type='<%= fmt["mimeType"] %>' label="<%= fmt["quality"] %>" selected="<%= i == 0 ? true : false %>">
                <% end %>
            <% end %>
        <% end %>


@@ -33,8 +33,8 @@
   "index" => continuation,
   "plid" => plid,
   "length_seconds" => video.length_seconds.to_f,
-  "play_next" => !rvs.empty? && !plid && params.continue,
-  "next_video" => rvs.select { |rv| rv["id"]? }[0]?.try &.["id"],
+  "play_next" => !video.related_videos.empty? && !plid && params.continue,
+  "next_video" => video.related_videos.select { |rv| rv["id"]? }[0]?.try &.["id"],
   "youtube_comments_text" => HTML.escape(translate(locale, "View YouTube comments")),
   "reddit_comments_text" => HTML.escape(translate(locale, "View Reddit comments")),
   "reddit_permalink_text" => HTML.escape(translate(locale, "View more comments on Reddit")),
@@ -72,13 +72,13 @@
       </h3>
     <% end %>

-    <% if !reason.empty? %>
+    <% if video.reason %>
       <h3>
-        <%= reason %>
+        <%= video.reason %>
       </h3>
-    <% elsif video.premiere_timestamp %>
+    <% elsif video.premiere_timestamp.try &.> Time.utc %>
       <h3>
-        <%= translate(locale, "Premieres in `x`", recode_date((video.premiere_timestamp.as(Time) - Time.utc).ago, locale)) %>
+        <%= video.premiere_timestamp.try { |t| translate(locale, "Premieres in `x`", recode_date((t - Time.utc).ago, locale)) } %>
       </h3>
     <% end %>
   </div>
@@ -137,18 +137,18 @@
       <label for="download_widget"><%= translate(locale, "Download as: ") %></label>
       <select style="width:100%" name="download_widget" id="download_widget">
         <% fmt_stream.each do |option| %>
-          <option value='{"id":"<%= video.id %>","itag":"<%= option["itag"] %>","title":"<%= URI.encode_www_form(video.title) %>-<%= video.id %>.<%= option["type"].split(";")[0].split("/")[1] %>"}'>
-            <%= itag_to_metadata?(option["itag"]).try &.["height"]? || "~240" %>p - <%= option["type"].split(";")[0] %>
+          <option value='{"id":"<%= video.id %>","itag":"<%= option["itag"] %>","title":"<%= URI.encode_www_form(video.title) %>-<%= video.id %>.<%= option["mimeType"].as_s.split(";")[0].split("/")[1] %>"}'>
+            <%= itag_to_metadata?(option["itag"]).try &.["height"]? || "~240" %>p - <%= option["mimeType"].as_s.split(";")[0] %>
           </option>
        <% end %>
        <% video_streams.each do |option| %>
-          <option value='{"id":"<%= video.id %>","itag":"<%= option["itag"] %>","title":"<%= URI.encode_www_form(video.title) %>-<%= video.id %>.<%= option["type"].split(";")[0].split("/")[1] %>"}'>
-            <%= option["quality_label"] %> - <%= option["type"].split(";")[0] %> @ <%= option["fps"] %>fps - video only
+          <option value='{"id":"<%= video.id %>","itag":"<%= option["itag"] %>","title":"<%= URI.encode_www_form(video.title) %>-<%= video.id %>.<%= option["mimeType"].as_s.split(";")[0].split("/")[1] %>"}'>
+            <%= option["qualityLabel"] %> - <%= option["mimeType"].as_s.split(";")[0] %> @ <%= option["fps"] %>fps - video only
          </option>
        <% end %>
        <% audio_streams.each do |option| %>
-          <option value='{"id":"<%= video.id %>","itag":"<%= option["itag"] %>","title":"<%= URI.encode_www_form(video.title) %>-<%= video.id %>.<%= option["type"].split(";")[0].split("/")[1] %>"}'>
-            <%= option["type"].split(";")[0] %> @ <%= option["bitrate"] %>k - audio only
+          <option value='{"id":"<%= video.id %>","itag":"<%= option["itag"] %>","title":"<%= URI.encode_www_form(video.title) %>-<%= video.id %>.<%= option["mimeType"].as_s.split(";")[0].split("/")[1] %>"}'>
+            <%= option["mimeType"].as_s.split(";")[0] %> @ <%= option["bitrate"]?.try &.as_i./ 1000 %>k - audio only
          </option>
        <% end %>
        <% captions.each do |caption| %>
@@ -169,19 +169,19 @@
     <p id="likes"><i class="icon ion-ios-thumbs-up"></i> <%= number_with_separator(video.likes) %></p>
     <p id="dislikes"><i class="icon ion-ios-thumbs-down"></i> <%= number_with_separator(video.dislikes) %></p>
     <p id="genre"><%= translate(locale, "Genre: ") %>
-      <% if video.genre_url.empty? %>
+      <% if !video.genre_url %>
         <%= video.genre %>
       <% else %>
         <a href="<%= video.genre_url %>"><%= video.genre %></a>
       <% end %>
     </p>
-    <% if !video.license.empty? %>
+    <% if video.license %>
       <p id="license"><%= translate(locale, "License: ") %><%= video.license %></p>
     <% end %>
     <p id="family_friendly"><%= translate(locale, "Family friendly? ") %><%= translate_bool(locale, video.is_family_friendly) %></p>
-    <p id="wilson"><%= translate(locale, "Wilson score: ") %><%= video.wilson_score.round(4) %></p>
-    <p id="rating"><%= translate(locale, "Rating: ") %><%= rating.round(4) %> / 5</p>
-    <p id="engagement"><%= translate(locale, "Engagement: ") %><%= engagement.round(2) %>%</p>
+    <p id="wilson"><%= translate(locale, "Wilson score: ") %><%= video.wilson_score %></p>
+    <p id="rating"><%= translate(locale, "Rating: ") %><%= video.average_rating %> / 5</p>
+    <p id="engagement"><%= translate(locale, "Engagement: ") %><%= video.engagement %>%</p>
     <% if video.allowed_regions.size != REGIONS.size %>
       <p id="allowed_regions">
         <% if video.allowed_regions.size < REGIONS.size // 2 %>
@@ -198,7 +198,9 @@
   <div class="h-box">
     <a href="/channel/<%= video.ucid %>" style="display:block;width:fit-content;width:-moz-fit-content">
       <div class="channel-profile">
+        <% if !video.author_thumbnail.empty? %>
         <img src="/ggpht<%= URI.parse(video.author_thumbnail).full_path %>">
+        <% end %>
         <span id="channel-name"><%= video.author %></span>
       </div>
     </a>
@@ -209,8 +211,8 @@
   <%= rendered "components/subscribe_widget" %>

   <p id="published-date">
-    <% if video.premiere_timestamp %>
-      <b><%= translate(locale, "Premieres `x`", video.premiere_timestamp.not_nil!.to_s("%B %-d, %R UTC")) %></b>
+    <% if video.premiere_timestamp.try &.> Time.utc %>
+      <b><%= video.premiere_timestamp.try { |t| translate(locale, "Premieres `x`", t.to_s("%B %-d, %R UTC")) } %></b>
     <% else %>
       <b><%= translate(locale, "Shared `x`", video.published.to_s("%B %-d, %Y")) %></b>
     <% end %>
@@ -244,7 +246,7 @@
 <% if params.related_videos %>
   <div class="h-box">
-    <% if !rvs.empty? %>
+    <% if !video.related_videos.empty? %>
      <div <% if plid %>style="display:none"<% end %>>
        <div class="pure-control-group">
          <label for="continue"><%= translate(locale, "Play next by default: ") %></label>
@@ -254,7 +256,7 @@
       </div>
     <% end %>

-    <% rvs.each do |rv| %>
+    <% video.related_videos.each do |rv| %>
      <% if rv["id"]? %>
        <a href="/watch?v=<%= rv["id"] %>">
          <% if !env.get("preferences").as(Preferences).thin_mode %>
@@ -267,16 +269,18 @@
           <h5 class="pure-g">
             <div class="pure-u-14-24">
               <% if rv["ucid"]? %>
-                <b style="width:100%"><a href="/channel/<%= rv["ucid"] %>"><%= rv["author"] %></a></b>
+                <b style="width:100%"><a href="/channel/<%= rv["ucid"] %>"><%= rv["author"]? %></a></b>
               <% else %>
-                <b style="width:100%"><%= rv["author"] %></b>
+                <b style="width:100%"><%= rv["author"]? %></b>
               <% end %>
             </div>
             <div class="pure-u-10-24" style="text-align:right">
               <% if views = rv["short_view_count_text"]?.try &.delete(", views watching") %>
+                <% if !views.empty? %>
                 <b class="width:100%"><%= translate(locale, "`x` views", views) %></b>
                 <% end %>
+              <% end %>
             </div>
           </h5>
         </a>