class RedditThing
  include JSON::Serializable

  property kind : String
  property data : RedditComment | RedditLink | RedditMore | RedditListing
end
class RedditComment
  include JSON::Serializable

  property author : String
  property body_html : String
  property replies : RedditThing | String
  property score : Int32
  property depth : Int32
  property permalink : String

  @[JSON::Field(converter: RedditComment::TimeConverter)]
  property created_utc : Time

  module TimeConverter
    def self.from_json(value : JSON::PullParser) : Time
      Time.unix(value.read_float.to_i)
    end

    def self.to_json(value : Time, json : JSON::Builder)
      json.number(value.to_unix)
    end
  end
end
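
# Note: Reddit reports `created_utc` as a floating-point Unix timestamp, so the
# converter above truncates it to whole seconds on the way in and writes it
# back out as a plain Unix integer.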
struct RedditLink
  include JSON::Serializable

  property author : String
  property score : Int32
  property subreddit : String
  property num_comments : Int32
  property id : String
  property permalink : String
  property title : String
end
struct RedditMore
  include JSON::Serializable

  property children : Array(String)
  property count : Int32
  property depth : Int32
end
class RedditListing
  include JSON::Serializable

  property children : Array(RedditThing)
  property modhash : String
end
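
# The five types above mirror Reddit's JSON envelope: every element is a
# "thing" carrying a `kind` tag plus a `data` payload, and listings nest
# further things. A hedged sketch of a round-trip (the JSON sample is
# illustrative, not a captured API response):
#
#   thing = RedditThing.from_json(%({"kind": "Listing", "data": {"children": [], "modhash": ""}}))
#   thing.data.as(RedditListing).children # => empty Array(RedditThing)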
def fetch_youtube_comments(id, cursor, format, locale, thin_mode, region, sort_by = "top")
  case cursor
  when nil, ""
    ctoken = produce_comment_continuation(id, cursor: "", sort_by: sort_by)
  when .starts_with? "ADSJ"
    ctoken = produce_comment_continuation(id, cursor: cursor, sort_by: sort_by)
  else
    ctoken = cursor
  end

  client_config = YoutubeAPI::ClientConfig.new(region: region)
  response = YoutubeAPI.next(continuation: ctoken, client_config: client_config)
  contents = nil

  if on_response_received_endpoints = response["onResponseReceivedEndpoints"]?
    header = nil
    on_response_received_endpoints.as_a.each do |item|
      if item["reloadContinuationItemsCommand"]?
        case item["reloadContinuationItemsCommand"]["slot"]
        when "RELOAD_CONTINUATION_SLOT_HEADER"
          header = item["reloadContinuationItemsCommand"]["continuationItems"][0]
        when "RELOAD_CONTINUATION_SLOT_BODY"
          # continuationItems is nil when video has no comments
          contents = item["reloadContinuationItemsCommand"]["continuationItems"]?
        end
      elsif item["appendContinuationItemsAction"]?
        contents = item["appendContinuationItemsAction"]["continuationItems"]
      end
    end
  elsif response["continuationContents"]?
    response = response["continuationContents"]
    if response["commentRepliesContinuation"]?
      body = response["commentRepliesContinuation"]
    else
      body = response["itemSectionContinuation"]
    end
    contents = body["contents"]?
    header = body["header"]?
  else
    raise NotFoundException.new("Comments not found.")
  end

  if !contents
    if format == "json"
      return {"comments" => [] of String}.to_json
    else
      return {"contentHtml" => "", "commentCount" => 0}.to_json
    end
  end

  continuation_item_renderer = nil
  contents.as_a.reject! do |item|
    if item["continuationItemRenderer"]?
      continuation_item_renderer = item["continuationItemRenderer"]
      true
    end
  end

  response = JSON.build do |json|
    json.object do
      if header
        count_text = header["commentsHeaderRenderer"]["countText"]
        comment_count = (count_text["simpleText"]? || count_text["runs"]?.try &.[0]?.try &.["text"]?)
          .try &.as_s.gsub(/\D/, "").to_i? || 0
        json.field "commentCount", comment_count
      end

      json.field "videoId", id

      json.field "comments" do
        json.array do
          contents.as_a.each do |node|
            json.object do
              if node["commentThreadRenderer"]?
                node = node["commentThreadRenderer"]
              end

              if node["replies"]?
                node_replies = node["replies"]["commentRepliesRenderer"]
              end

              if node["comment"]?
                node_comment = node["comment"]["commentRenderer"]
              else
                node_comment = node["commentRenderer"]
              end

              content_html = node_comment["contentText"]?.try { |t| parse_content(t, id) } || ""
              author = node_comment["authorText"]?.try &.["simpleText"]? || ""

              json.field "verified", (node_comment["authorCommentBadge"]? != nil)

              json.field "author", author
              json.field "authorThumbnails" do
                json.array do
                  node_comment["authorThumbnail"]["thumbnails"].as_a.each do |thumbnail|
                    json.object do
                      json.field "url", thumbnail["url"]
                      json.field "width", thumbnail["width"]
                      json.field "height", thumbnail["height"]
                    end
                  end
                end
              end

              if node_comment["authorEndpoint"]?
                json.field "authorId", node_comment["authorEndpoint"]["browseEndpoint"]["browseId"]
                json.field "authorUrl", node_comment["authorEndpoint"]["browseEndpoint"]["canonicalBaseUrl"]
              else
                json.field "authorId", ""
                json.field "authorUrl", ""
              end

              published_text = node_comment["publishedTimeText"]["runs"][0]["text"].as_s
              published = decode_date(published_text.rchop(" (edited)"))

              if published_text.includes?(" (edited)")
                json.field "isEdited", true
              else
                json.field "isEdited", false
              end

              json.field "content", html_to_content(content_html)
              json.field "contentHtml", content_html

              json.field "published", published.to_unix

              json.field "publishedText", translate(locale, "`x` ago", recode_date(published, locale))

              comment_action_buttons_renderer = node_comment["actionButtons"]["commentActionButtonsRenderer"]

              json.field "likeCount", comment_action_buttons_renderer["likeButton"]["toggleButtonRenderer"]["accessibilityData"]["accessibilityData"]["label"].as_s.scan(/\d/).map(&.[0]).join.to_i
              json.field "commentId", node_comment["commentId"]
              json.field "authorIsChannelOwner", node_comment["authorIsChannelOwner"]

              if comment_action_buttons_renderer["creatorHeart"]?
                hearth_data = comment_action_buttons_renderer["creatorHeart"]["creatorHeartRenderer"]["creatorThumbnail"]
                json.field "creatorHeart" do
                  json.object do
                    json.field "creatorThumbnail", hearth_data["thumbnails"][-1]["url"]
                    json.field "creatorName", hearth_data["accessibility"]["accessibilityData"]["label"]
                  end
                end
              end

              if node_replies && !response["commentRepliesContinuation"]?
                if node_replies["moreText"]?
                  reply_count = (node_replies["moreText"]["simpleText"]? || node_replies["moreText"]["runs"]?.try &.[0]?.try &.["text"]?)
                    .try &.as_s.gsub(/\D/, "").to_i? || 1
                elsif node_replies["viewReplies"]?
                  reply_count = node_replies["viewReplies"]["buttonRenderer"]["text"]?.try &.["runs"][1]?.try &.["text"]?.try &.as_s.to_i? || 1
                else
                  reply_count = 1
                end

                if node_replies["continuations"]?
                  continuation = node_replies["continuations"]?.try &.as_a[0]["nextContinuationData"]["continuation"].as_s
                elsif node_replies["contents"]?
                  continuation = node_replies["contents"]?.try &.as_a[0]["continuationItemRenderer"]["continuationEndpoint"]["continuationCommand"]["token"].as_s
                end
                continuation ||= ""

                json.field "replies" do
                  json.object do
                    json.field "replyCount", reply_count
                    json.field "continuation", continuation
                  end
                end
              end
            end
          end
        end
      end

      if continuation_item_renderer
        if continuation_item_renderer["continuationEndpoint"]?
          continuation_endpoint = continuation_item_renderer["continuationEndpoint"]
        elsif continuation_item_renderer["button"]?
          continuation_endpoint = continuation_item_renderer["button"]["buttonRenderer"]["command"]
        end
        if continuation_endpoint
          json.field "continuation", continuation_endpoint["continuationCommand"]["token"].as_s
        end
      end
    end
  end

  if format == "html"
    response = JSON.parse(response)
    content_html = template_youtube_comments(response, locale, thin_mode)

    response = JSON.build do |json|
      json.object do
        json.field "contentHtml", content_html

        if response["commentCount"]?
          json.field "commentCount", response["commentCount"]
        else
          json.field "commentCount", 0
        end
      end
    end
  end

  return response
end
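
# A minimal usage sketch (argument values are illustrative): the function
# returns a JSON string, either the full comment structure when `format` is
# "json" or a {"contentHtml", "commentCount"} wrapper when it is "html".
#
#   fetch_youtube_comments("dQw4w9WgXcQ", nil, "json", locale, false, "US", sort_by: "top")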
def fetch_reddit_comments(id, sort_by = "confidence")
  client = make_client(REDDIT_URL)
  headers = HTTP::Headers{"User-Agent" => "web:invidious:v#{CURRENT_VERSION} (by github.com/iv-org/invidious)"}

  # TODO: Use something like #479 for a static list of instances to use here
  query = URI::Params.encode({q: "(url:3D#{id} OR url:#{id}) AND (site:invidio.us OR site:youtube.com OR site:youtu.be)"})
  search_results = client.get("/search.json?#{query}", headers)

  if search_results.status_code == 200
    search_results = RedditThing.from_json(search_results.body)

    # For videos that have more than one thread, choose the one with the highest score
    threads = search_results.data.as(RedditListing).children
    thread = threads.max_by?(&.data.as(RedditLink).score).try(&.data.as(RedditLink))
    result = thread.try do |t|
      body = client.get("/r/#{t.subreddit}/comments/#{t.id}.json?limit=100&sort=#{sort_by}", headers).body
      Array(RedditThing).from_json(body)
    end
    result ||= [] of RedditThing
  elsif search_results.status_code == 302
    # Previously, if there was only one result then the API would redirect to that result.
    # Now, it appears it will still return a listing so this section is likely unnecessary.

    result = client.get(search_results.headers["Location"], headers).body
    result = Array(RedditThing).from_json(result)

    thread = result[0].data.as(RedditListing).children[0].data.as(RedditLink)
  else
    raise NotFoundException.new("Comments not found.")
  end

  client.close

  comments = result[1]?.try(&.data.as(RedditListing).children)
  comments ||= [] of RedditThing
  return comments, thread
end
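
# Sketch of typical use (error handling elided): the method returns a
# {comments, thread} tuple and raises NotFoundException when no matching
# submission is found.
#
#   comments, reddit_thread = fetch_reddit_comments("dQw4w9WgXcQ", sort_by: "top")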
def template_youtube_comments(comments, locale, thin_mode, is_replies = false)
  String.build do |html|
    root = comments["comments"].as_a
    root.each do |child|
      if child["replies"]?
        replies_count_text = translate_count(locale,
          "comments_view_x_replies",
          child["replies"]["replyCount"].as_i64 || 0,
          NumberFormatting::Separator
        )

        replies_html = <<-END_HTML
        <div id="replies" class="pure-g">
          <div class="pure-u-1-24"></div>
          <div class="pure-u-23-24">
            <p>
              <a href="javascript:void(0)" data-continuation="#{child["replies"]["continuation"]}"
                data-onclick="get_youtube_replies" data-load-replies>#{replies_count_text}</a>
            </p>
          </div>
        </div>
        END_HTML
      end

      if !thin_mode
        author_thumbnail = "/ggpht#{URI.parse(child["authorThumbnails"][-1]["url"].as_s).request_target}"
      else
        author_thumbnail = ""
      end

      author_name = HTML.escape(child["author"].as_s)
      if child["verified"]?.try &.as_bool && child["authorIsChannelOwner"]?.try &.as_bool
        author_name += " <i class=\"icon ion ion-md-checkmark-circle\"></i>"
      elsif child["verified"]?.try &.as_bool
        author_name += " <i class=\"icon ion ion-md-checkmark\"></i>"
      end
      html << <<-END_HTML
      <div class="pure-g" style="width:100%">
        <div class="channel-profile pure-u-4-24 pure-u-md-2-24">
          <img loading="lazy" style="margin-right:1em;margin-top:1em;width:90%" src="#{author_thumbnail}">
        </div>
        <div class="pure-u-20-24 pure-u-md-22-24">
          <p>
            <b>
              <a class="#{child["authorIsChannelOwner"] == true ? "channel-owner" : ""}" href="#{child["authorUrl"]}">#{author_name}</a>
            </b>
            <p style="white-space:pre-wrap">#{child["contentHtml"]}</p>
      END_HTML

      if child["attachment"]?
        attachment = child["attachment"]

        case attachment["type"]
        when "image"
          attachment = attachment["imageThumbnails"][1]

          html << <<-END_HTML
          <div class="pure-g">
            <div class="pure-u-1 pure-u-md-1-2">
              <img loading="lazy" style="width:100%" src="/ggpht#{URI.parse(attachment["url"].as_s).request_target}">
            </div>
          </div>
          END_HTML
        when "video"
          html << <<-END_HTML
          <div class="pure-g">
            <div class="pure-u-1 pure-u-md-1-2">
              <div style="position:relative;width:100%;height:0;padding-bottom:56.25%;margin-bottom:5px">
          END_HTML

          if attachment["error"]?
            html << <<-END_HTML
            <p>#{attachment["error"]}</p>
            END_HTML
          else
            html << <<-END_HTML
            <iframe id='ivplayer' style='position:absolute;width:100%;height:100%;left:0;top:0' src='/embed/#{attachment["videoId"]?}?autoplay=0' style='border:none;'></iframe>
            END_HTML
          end

          html << <<-END_HTML
              </div>
            </div>
          </div>
          END_HTML
        else nil # Ignore
        end
      end

      html << <<-END_HTML
      <span title="#{Time.unix(child["published"].as_i64).to_s(translate(locale, "%A %B %-d, %Y"))}">#{translate(locale, "`x` ago", recode_date(Time.unix(child["published"].as_i64), locale))} #{child["isEdited"] == true ? translate(locale, "(edited)") : ""}</span>
      END_HTML

      if comments["videoId"]?
        html << <<-END_HTML
        <a href="https://www.youtube.com/watch?v=#{comments["videoId"]}&lc=#{child["commentId"]}" title="#{translate(locale, "YouTube comment permalink")}">[YT]</a>
        END_HTML
      elsif comments["authorId"]?
        html << <<-END_HTML
        <a href="https://www.youtube.com/channel/#{comments["authorId"]}/community?lb=#{child["commentId"]}" title="#{translate(locale, "YouTube comment permalink")}">[YT]</a>
        END_HTML
      end

      html << <<-END_HTML
      <i class="icon ion-ios-thumbs-up"></i> #{number_with_separator(child["likeCount"])}
      END_HTML

      if child["creatorHeart"]?
        if !thin_mode
          creator_thumbnail = "/ggpht#{URI.parse(child["creatorHeart"]["creatorThumbnail"].as_s).request_target}"
        else
          creator_thumbnail = ""
        end

        html << <<-END_HTML
        <span class="creator-heart-container" title="#{translate(locale, "`x` marked it with a ❤", child["creatorHeart"]["creatorName"].as_s)}">
          <div class="creator-heart">
            <img loading="lazy" class="creator-heart-background-hearted" src="#{creator_thumbnail}"></img>
            <div class="creator-heart-small-hearted">
              <div class="icon ion-ios-heart creator-heart-small-container"></div>
            </div>
          </div>
        </span>
        END_HTML
      end

      html << <<-END_HTML
          </p>
          #{replies_html}
        </div>
      </div>
      END_HTML
    end

    if comments["continuation"]?
      html << <<-END_HTML
      <div class="pure-g">
        <div class="pure-u-1">
          <p>
            <a href="javascript:void(0)" data-continuation="#{comments["continuation"]}"
              data-onclick="get_youtube_replies" data-load-more #{"data-load-replies" if is_replies}>#{translate(locale, "Load more")}</a>
          </p>
        </div>
      </div>
      END_HTML
    end
  end
end
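
# How this pairs with fetch_youtube_comments is sketched below (assumes the
# JSON produced above; `locale` and `thin_mode` come from the caller):
#
#   comments = JSON.parse(fetch_youtube_comments(id, nil, "json", locale, thin_mode, region))
#   content_html = template_youtube_comments(comments, locale, thin_mode)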
def template_reddit_comments(root, locale)
  String.build do |html|
    root.each do |child|
      if child.data.is_a?(RedditComment)
        child = child.data.as(RedditComment)
        body_html = HTML.unescape(child.body_html)

        replies_html = ""
        if child.replies.is_a?(RedditThing)
          replies = child.replies.as(RedditThing)
          replies_html = template_reddit_comments(replies.data.as(RedditListing).children, locale)
        end

        if child.depth > 0
          html << <<-END_HTML
          <div class="pure-g">
            <div class="pure-u-1-24">
            </div>
            <div class="pure-u-23-24">
          END_HTML
        else
          html << <<-END_HTML
          <div class="pure-g">
            <div class="pure-u-1">
          END_HTML
        end

        html << <<-END_HTML
        <p>
          <a href="javascript:void(0)" data-onclick="toggle_parent">[ − ]</a>
          <b><a href="https://www.reddit.com/user/#{child.author}">#{child.author}</a></b>
          #{translate_count(locale, "comments_points_count", child.score, NumberFormatting::Separator)}
          <span title="#{child.created_utc.to_s(translate(locale, "%a %B %-d %T %Y UTC"))}">#{translate(locale, "`x` ago", recode_date(child.created_utc, locale))}</span>
          <a href="https://www.reddit.com#{child.permalink}" title="#{translate(locale, "permalink")}">#{translate(locale, "permalink")}</a>
        </p>
        <div>
        #{body_html}
        #{replies_html}
        </div>
        </div>
        </div>
        END_HTML
      end
    end
  end
end
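
# Sketch of the Reddit pipeline (assumed wiring; the exact route code lives
# elsewhere): the threads returned by fetch_reddit_comments are rendered here,
# and the resulting HTML is then post-processed by replace_links / fill_links
# below.
#
#   comments, reddit_thread = fetch_reddit_comments(id)
#   content_html = template_reddit_comments(comments, locale)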
def replace_links(html)
  # Check if the document is empty
  # Prevents edge-case bug with Reddit comments, see issue #3115
  if html.nil? || html.empty?
    return html
  end

  html = XML.parse_html(html)

  html.xpath_nodes(%q(//a)).each do |anchor|
    url = URI.parse(anchor["href"])

    if url.host.nil? || url.host.not_nil!.ends_with?("youtube.com") || url.host.not_nil!.ends_with?("youtu.be")
      if url.host.try &.ends_with? "youtu.be"
        url = "/watch?v=#{url.path.lstrip('/')}#{url.query_params}"
      else
        if url.path == "/redirect"
          params = HTTP::Params.parse(url.query.not_nil!)
          anchor["href"] = params["q"]?
        else
          anchor["href"] = url.request_target
        end
      end
    elsif url.to_s == "#"
      begin
        length_seconds = decode_length_seconds(anchor.content)
      rescue ex
        length_seconds = decode_time(anchor.content)
      end

      if length_seconds > 0
        anchor["href"] = "javascript:void(0)"
        anchor["onclick"] = "player.currentTime(#{length_seconds})"
      else
        anchor["href"] = url.request_target
      end
    end
  end

  html = html.xpath_node(%q(//body)).not_nil!
  if node = html.xpath_node(%q(./p))
    html = node
  end

  return html.to_xml(options: XML::SaveOptions::NO_DECL)
end
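
# In short: anchors pointing at YouTube hosts are rewritten as relative /watch
# links, "/redirect" wrappers are unwrapped to their `q` target, and bare "#"
# timestamp anchors become in-page player seeks via `player.currentTime(...)`.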
def fill_links(html, scheme, host)
  # Check if the document is empty
  # Prevents edge-case bug with Reddit comments, see issue #3115
  if html.nil? || html.empty?
    return html
  end

  html = XML.parse_html(html)

  html.xpath_nodes("//a").each do |match|
    url = URI.parse(match["href"])
    # Reddit links don't have host
    if !url.host && !match["href"].starts_with?("javascript") && !url.to_s.ends_with? "#"
      url.scheme = scheme
      url.host = host
      match["href"] = url
    end
  end

  if host == "www.youtube.com"
    html = html.xpath_node(%q(//body/p)).not_nil!
  end

  return html.to_xml(options: XML::SaveOptions::NO_DECL)
end
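
# A hedged example call (host value is illustrative): relative hrefs produced
# by Reddit's markdown get an explicit scheme and host.
#
#   content_html = fill_links(content_html, "https", "www.reddit.com")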
def text_to_parsed_content(text : String) : JSON::Any
  nodes = [] of JSON::Any
  # Convert each line into an array of nodes
  text.split('\n').each do |line|
    # Start with a single plain-text node covering the whole line, before
    # searching for URL patterns inside it:
    # { 'text': line }
    currentNodes = [] of JSON::Any
    initialNode = {"text" => line}
    currentNodes << (JSON.parse(initialNode.to_json))

    # For each URL match, keep the text before the match in the last node and
    # append a new node carrying the URL information:
    # { 'text': match, 'navigationEndpoint': { 'urlEndpoint' : 'url': match } }
    line.scan(/https?:\/\/[^ ]*/).each do |urlMatch|
      # Retrieve the last node and strip the match from its text
      lastNode = currentNodes[currentNodes.size - 1].as_h
      splittedLastNode = lastNode["text"].as_s.split(urlMatch[0])
      lastNode["text"] = JSON.parse(splittedLastNode[0].to_json)
      currentNodes[currentNodes.size - 1] = JSON.parse(lastNode.to_json)
      # Create a new node with the match and its navigation info
      currentNode = {"text" => urlMatch[0], "navigationEndpoint" => {"urlEndpoint" => {"url" => urlMatch[0]}}}
      currentNodes << (JSON.parse(currentNode.to_json))
      # If any text remains after the match, add it as a new plain node
      afterNode = {"text" => splittedLastNode.size > 0 ? splittedLastNode[1] : ""}
      currentNodes << (JSON.parse(afterNode.to_json))
    end

    # After processing all matches inside the line, append "\n" to the last
    # node to preserve the line break
    lastNode = currentNodes[currentNodes.size - 1].as_h
    lastNode["text"] = JSON.parse("#{currentNodes[currentNodes.size - 1]["text"]}\n".to_json)
    currentNodes[currentNodes.size - 1] = JSON.parse(lastNode.to_json)

    # Finally, add this line's nodes to the returned list
    currentNodes.each do |node|
      nodes << (node)
    end
  end
  return JSON.parse({"runs" => nodes}.to_json)
end
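
# Example of the shape produced (a sketch): "see https://example.com" becomes
#   {"runs" => [{"text" => "see "},
#               {"text" => "https://example.com",
#                "navigationEndpoint" => {"urlEndpoint" => {"url" => "https://example.com"}}},
#               {"text" => "\n"}]}
# which parse_content / content_to_comment_html below know how to render.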
def parse_content(content : JSON::Any, video_id : String? = "") : String
  content["simpleText"]?.try &.as_s.rchop('\ufeff').try { |b| HTML.escape(b) }.to_s ||
    content["runs"]?.try &.as_a.try { |r| content_to_comment_html(r, video_id).try &.to_s.gsub("\n", "<br>") } || ""
end
def content_to_comment_html(content, video_id : String? = "")
  html_array = content.map do |run|
    # Sometimes, there is an empty element.
    # See: https://github.com/iv-org/invidious/issues/3096
    next if run.as_h.empty?

    text = HTML.escape(run["text"].as_s)

    if run["navigationEndpoint"]?
      if url = run["navigationEndpoint"]["urlEndpoint"]?.try &.["url"].as_s
        url = URI.parse(url)
        displayed_url = text

        if url.host == "youtu.be"
          url = "/watch?v=#{url.request_target.lstrip('/')}"
        elsif url.host.nil? || url.host.not_nil!.ends_with?("youtube.com")
          if url.path == "/redirect"
            # Sometimes, links can be corrupted (why?) so make sure to fall back
            # nicely. See https://github.com/iv-org/invidious/issues/2682
            url = url.query_params["q"]? || ""
            displayed_url = url
          else
            url = url.request_target
            displayed_url = "youtube.com#{url}"
          end
        end

        text = %(<a href="#{url}">#{reduce_uri(displayed_url)}</a>)
      elsif watch_endpoint = run["navigationEndpoint"]["watchEndpoint"]?
        start_time = watch_endpoint["startTimeSeconds"]?.try &.as_i
        link_video_id = watch_endpoint["videoId"].as_s

        url = "/watch?v=#{link_video_id}"
        url += "&t=#{start_time}" if !start_time.nil?

        # If the current video ID (passed through from the caller function)
        # is the same as the video ID in the link, add HTML attributes for
        # the JS handler function that bypasses page reload.
        #
        # See: https://github.com/iv-org/invidious/issues/3063
        if link_video_id == video_id
          start_time ||= 0
          text = %(<a href="#{url}" data-onclick="jump_to_time" data-jump-time="#{start_time}">#{reduce_uri(text)}</a>)
        else
          text = %(<a href="#{url}">#{text}</a>)
        end
      elsif url = run.dig?("navigationEndpoint", "commandMetadata", "webCommandMetadata", "url").try &.as_s
        if text.starts_with?(/\s?[@#]/)
          # Handle "pings" in comments and hashtags differently
          # See:
          # - https://github.com/iv-org/invidious/issues/3038
          # - https://github.com/iv-org/invidious/issues/3062
          text = %(<a href="#{url}">#{text}</a>)
        else
          text = %(<a href="#{url}">#{reduce_uri(url)}</a>)
        end
      end
    end

    text = "<b>#{text}</b>" if run["bold"]?
    text = "<i>#{text}</i>" if run["italics"]?

    text
  end

  return html_array.join("").delete('\ufeff')
end
def produce_comment_continuation(video_id, cursor = "", sort_by = "top")
  object = {
    "2:embedded" => {
      "2:string"    => video_id,
      "25:varint"   => 0_i64,
      "28:varint"   => 1_i64,
      "36:embedded" => {
        "5:varint" => -1_i64,
        "8:varint" => 0_i64,
      },
      "40:embedded" => {
        "1:varint" => 4_i64,
        "3:string" => "https://www.youtube.com",
        "4:string" => "",
      },
    },
    "3:varint"   => 6_i64,
    "6:embedded" => {
      "1:string"   => cursor,
      "4:embedded" => {
        "4:string" => video_id,
        "6:varint" => 0_i64,
      },
      "5:varint" => 20_i64,
    },
  }

  case sort_by
  when "top"
    object["6:embedded"].as(Hash)["4:embedded"].as(Hash)["6:varint"] = 0_i64
  when "new", "newest"
    object["6:embedded"].as(Hash)["4:embedded"].as(Hash)["6:varint"] = 1_i64
  else # top
    object["6:embedded"].as(Hash)["4:embedded"].as(Hash)["6:varint"] = 0_i64
  end

  continuation = object.try { |i| Protodec::Any.cast_json(i) }
    .try { |i| Protodec::Any.from_json(i) }
    .try { |i| Base64.urlsafe_encode(i) }
    .try { |i| URI.encode_www_form(i) }

  return continuation
end
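
# The continuation token is a protobuf-style message: the numbered-field hash
# above is converted by Protodec into bytes, then URL-safe Base64- and
# form-encoded. A hedged sketch of how it feeds back into the comment fetcher
# (video ID and region are illustrative):
#
#   ctoken = produce_comment_continuation("dQw4w9WgXcQ", cursor: "", sort_by: "new")
#   YoutubeAPI.next(continuation: ctoken, client_config: YoutubeAPI::ClientConfig.new(region: "US"))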