mirror of
https://gitea.invidious.io/iv-org/invidious-copy-2022-03-16.git
synced 2024-08-15 00:53:18 +00:00
Multiple youtube_api.cr helper fixes
- Add documentation
- Bump web client version string
- Add charset=UTF-8 to the 'content-type' header
- Parse JSON and return it as a Hash
- Handle API error messages
This commit is contained in:
parent commit 8bbb016fa4 → this commit 43bd331e48
4 changed files with 36 additions and 38 deletions
|
@ -229,22 +229,8 @@ def fetch_channel(ucid, db, pull_all_videos = true, locale = nil)
|
|||
page = 1
|
||||
|
||||
LOGGER.trace("fetch_channel: #{ucid} : Downloading channel videos page")
|
||||
response_body = get_channel_videos_response(ucid, page, auto_generated: auto_generated)
|
||||
|
||||
videos = [] of SearchVideo
|
||||
begin
|
||||
initial_data = JSON.parse(response_body)
|
||||
raise InfoException.new("Could not extract channel JSON") if !initial_data
|
||||
|
||||
LOGGER.trace("fetch_channel: #{ucid} : Extracting videos from channel videos page initial_data")
|
||||
videos = extract_videos(initial_data.as_h, author, ucid)
|
||||
rescue ex
|
||||
if response_body.includes?("To continue with your YouTube experience, please fill out the form below.") ||
|
||||
response_body.includes?("https://www.google.com/sorry/index")
|
||||
raise InfoException.new("Could not extract channel info. Instance is likely blocked.")
|
||||
end
|
||||
raise ex
|
||||
end
|
||||
initial_data = get_channel_videos_response(ucid, page, auto_generated: auto_generated)
|
||||
videos = extract_videos(initial_data, author, ucid)
|
||||
|
||||
LOGGER.trace("fetch_channel: #{ucid} : Extracting videos from channel RSS feed")
|
||||
rss.xpath_nodes("//feed/entry").each do |entry|
|
||||
|
@ -304,10 +290,8 @@ def fetch_channel(ucid, db, pull_all_videos = true, locale = nil)
|
|||
ids = [] of String
|
||||
|
||||
loop do
|
||||
response_body = get_channel_videos_response(ucid, page, auto_generated: auto_generated)
|
||||
initial_data = JSON.parse(response_body)
|
||||
raise InfoException.new("Could not extract channel JSON") if !initial_data
|
||||
videos = extract_videos(initial_data.as_h, author, ucid)
|
||||
initial_data = get_channel_videos_response(ucid, page, auto_generated: auto_generated)
|
||||
videos = extract_videos(initial_data, author, ucid)
|
||||
|
||||
count = videos.size
|
||||
videos = videos.map { |video| ChannelVideo.new({
|
||||
|
@ -358,8 +342,7 @@ end
|
|||
def fetch_channel_playlists(ucid, author, continuation, sort_by)
|
||||
if continuation
|
||||
response_json = request_youtube_api_browse(continuation)
|
||||
result = JSON.parse(response_json)
|
||||
continuationItems = result["onResponseReceivedActions"]?
|
||||
continuationItems = response_json["onResponseReceivedActions"]?
|
||||
.try &.[0]["appendContinuationItemsAction"]["continuationItems"]
|
||||
|
||||
return [] of SearchItem, nil if !continuationItems
|
||||
|
@ -964,21 +947,16 @@ def get_60_videos(ucid, author, page, auto_generated, sort_by = "newest")
|
|||
videos = [] of SearchVideo
|
||||
|
||||
2.times do |i|
|
||||
response_json = get_channel_videos_response(ucid, page * 2 + (i - 1), auto_generated: auto_generated, sort_by: sort_by)
|
||||
initial_data = JSON.parse(response_json)
|
||||
break if !initial_data
|
||||
videos.concat extract_videos(initial_data.as_h, author, ucid)
|
||||
initial_data = get_channel_videos_response(ucid, page * 2 + (i - 1), auto_generated: auto_generated, sort_by: sort_by)
|
||||
videos.concat extract_videos(initial_data, author, ucid)
|
||||
end
|
||||
|
||||
return videos.size, videos
|
||||
end
|
||||
|
||||
def get_latest_videos(ucid)
|
||||
response_json = get_channel_videos_response(ucid)
|
||||
initial_data = JSON.parse(response_json)
|
||||
return [] of SearchVideo if !initial_data
|
||||
initial_data = get_channel_videos_response(ucid)
|
||||
author = initial_data["metadata"]?.try &.["channelMetadataRenderer"]?.try &.["title"]?.try &.as_s
|
||||
items = extract_videos(initial_data.as_h, author, ucid)
|
||||
|
||||
return items
|
||||
return extract_videos(initial_data, author, ucid)
|
||||
end
|
||||
|
|
|
@ -4,8 +4,18 @@
|
|||
|
||||
# Hard-coded constants required by the API
|
||||
HARDCODED_API_KEY = "AIzaSyAO_FJ2SlqU8Q4STEHLGCilw_Y9_11qcW8"
|
||||
HARDCODED_CLIENT_VERS = "2.20210318.08.00"
|
||||
HARDCODED_CLIENT_VERS = "2.20210330.08.00"
|
||||
|
||||
####################################################################
|
||||
# request_youtube_api_browse(continuation)
|
||||
#
|
||||
# Requests the youtubei/vi/browse endpoint with the required headers
|
||||
# to get JSON in en-US (english US).
|
||||
#
|
||||
# The requested data is a continuation token (ctoken). Depending on
|
||||
# this token's contents, the returned data can be comments, playlist
|
||||
# videos, search results, channel community tab, ...
|
||||
#
|
||||
def request_youtube_api_browse(continuation)
|
||||
# JSON Request data, required by the API
|
||||
data = {
|
||||
|
@ -20,12 +30,23 @@ def request_youtube_api_browse(continuation)
|
|||
"continuation": continuation,
|
||||
}
|
||||
|
||||
# Send the POST request and return result
|
||||
# Send the POST request and parse result
|
||||
response = YT_POOL.client &.post(
|
||||
"/youtubei/v1/browse?key=#{HARDCODED_API_KEY}",
|
||||
headers: HTTP::Headers{"content-type" => "application/json"},
|
||||
headers: HTTP::Headers{"content-type" => "application/json; charset=UTF-8"},
|
||||
body: data.to_json
|
||||
)
|
||||
|
||||
return response.body
|
||||
initial_data = JSON.parse(response.body).as_h
|
||||
|
||||
# Error handling
|
||||
if initial_data.has_key?("error")
|
||||
code = initial_data["error"]["code"]
|
||||
message = initial_data["error"]["message"].to_s.sub(/(\\n)+\^$/, "")
|
||||
|
||||
raise InfoException.new("Could not extract JSON. Youtube API returned \
|
||||
error #{code} with message:<br>\"#{message}\"")
|
||||
end
|
||||
|
||||
return initial_data
|
||||
end
|
||||
|
|
|
@ -451,7 +451,7 @@ def get_playlist_videos(db, playlist, offset, locale = nil, continuation = nil)
|
|||
offset = (offset / 100).to_i64 * 100_i64
|
||||
|
||||
ctoken = produce_playlist_continuation(playlist.id, offset)
|
||||
initial_data = JSON.parse(request_youtube_api_browse(ctoken)).as_h
|
||||
initial_data = request_youtube_api_browse(ctoken)
|
||||
else
|
||||
response = YT_POOL.client &.get("/playlist?list=#{playlist.id}&gl=US&hl=en")
|
||||
initial_data = extract_initial_data(response.body)
|
||||
|
|
|
@ -246,8 +246,7 @@ def channel_search(query, page, channel)
|
|||
continuation = produce_channel_search_continuation(ucid, query, page)
|
||||
response_json = request_youtube_api_browse(continuation)
|
||||
|
||||
result = JSON.parse(response_json)
|
||||
continuationItems = result["onResponseReceivedActions"]?
|
||||
continuationItems = response_json["onResponseReceivedActions"]?
|
||||
.try &.[0]["appendContinuationItemsAction"]["continuationItems"]
|
||||
|
||||
return 0, [] of SearchItem if !continuationItems
|
||||
|
|
Loading…
Reference in a new issue