Mirror of https://gitea.invidious.io/iv-org/invidious-copy-2022-04-11.git (synced 2024-08-15 00:43:26 +00:00)
Add '/api/v1/channels/search'
commit f80f4f2521
parent 60038b29f1
2 changed files with 103 additions and 8 deletions
src/invidious.cr (101)
@@ -2598,7 +2598,104 @@ end
   end
 end
 
+get "/api/v1/channels/search/:ucid" do |env|
+  env.response.content_type = "application/json"
+
+  ucid = env.params.url["ucid"]
+
+  query = env.params.query["q"]?
+  query ||= ""
+
+  page = env.params.query["page"]?.try &.to_i?
+  page ||= 1
+
+  count, search_results = channel_search(query, page, ucid)
+  response = JSON.build do |json|
+    json.array do
+      search_results.each do |item|
+        json.object do
+          case item
+          when SearchVideo
+            json.field "type", "video"
+            json.field "title", item.title
+            json.field "videoId", item.id
+
+            json.field "author", item.author
+            json.field "authorId", item.ucid
+            json.field "authorUrl", "/channel/#{item.ucid}"
+
+            json.field "videoThumbnails" do
+              generate_thumbnails(json, item.id)
+            end
+
+            json.field "description", item.description
+            json.field "descriptionHtml", item.description_html
+
+            json.field "viewCount", item.views
+            json.field "published", item.published.epoch
+            json.field "publishedText", "#{recode_date(item.published)} ago"
+            json.field "lengthSeconds", item.length_seconds
+            json.field "liveNow", item.live_now
+          when SearchPlaylist
+            json.field "type", "playlist"
+            json.field "title", item.title
+            json.field "playlistId", item.id
+
+            json.field "author", item.author
+            json.field "authorId", item.ucid
+            json.field "authorUrl", "/channel/#{item.ucid}"
+
+            json.field "videos" do
+              json.array do
+                item.videos.each do |video|
+                  json.object do
+                    json.field "title", video.title
+                    json.field "videoId", video.id
+                    json.field "lengthSeconds", video.length_seconds
+
+                    json.field "videoThumbnails" do
+                      generate_thumbnails(json, video.id)
+                    end
+                  end
+                end
+              end
+            end
+          when SearchChannel
+            json.field "type", "channel"
+            json.field "author", item.author
+            json.field "authorId", item.ucid
+            json.field "authorUrl", "/channel/#{item.ucid}"
+
+            json.field "authorThumbnails" do
+              json.array do
+                qualities = [32, 48, 76, 100, 176, 512]
+
+                qualities.each do |quality|
+                  json.object do
+                    json.field "url", item.author_thumbnail.gsub("=s176-", "=s#{quality}-")
+                    json.field "width", quality
+                    json.field "height", quality
+                  end
+                end
+              end
+            end
+
+            json.field "subCount", item.subscriber_count
+            json.field "videoCount", item.video_count
+            json.field "description", item.description
+            json.field "descriptionHtml", item.description_html
+          end
+        end
+      end
+    end
+  end
+
+  response
+end
+
 get "/api/v1/search" do |env|
+  env.response.content_type = "application/json"
+
   query = env.params.query["q"]?
   query ||= ""
 
@@ -2621,8 +2718,6 @@ get "/api/v1/search" do |env|
   content_type = env.params.query["type"]?.try &.downcase
   content_type ||= "video"
 
-  env.response.content_type = "application/json"
-
   begin
     search_params = produce_search_params(sort_by, date, content_type, duration, features)
   rescue ex
@@ -2634,9 +2729,9 @@ get "/api/v1/search" do |env|
     end
   end
 
+  count, search_results = search(query, page, search_params).as(Tuple)
   response = JSON.build do |json|
     json.array do
-      count, search_results = search(query, page, search_params).as(Tuple)
       search_results.each do |item|
         json.object do
           case item
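A quick way to exercise the new route is a plain GET against /api/v1/channels/search/:ucid. The sketch below is illustrative only: the instance URL, channel ID, and query string are placeholders, and the fields it reads back ("type", "title", "videoId", "playlistId", "author", "authorId") are the ones emitted by the handler added above.

require "http/client"
require "json"

# Placeholders: point `instance` at a running Invidious instance and
# `ucid` at a real channel ID; `query` is an arbitrary search term.
instance = "http://localhost:3000"
ucid     = "UCXuqSBlHAE6Xw-yeJA0Tunw"
query    = "review" # URL-encode this if it contains spaces or special characters

response = HTTP::Client.get("#{instance}/api/v1/channels/search/#{ucid}?q=#{query}&page=1")
results  = JSON.parse(response.body).as_a

results.each do |item|
  case item["type"].as_s
  when "video"
    puts "video:    #{item["title"].as_s} (#{item["videoId"].as_s})"
  when "playlist"
    puts "playlist: #{item["title"].as_s} (#{item["playlistId"].as_s})"
  when "channel"
    puts "channel:  #{item["author"].as_s} (#{item["authorId"].as_s})"
  end
end

As in the handler, q falls back to an empty string and page falls back to 1 when omitted.
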
@@ -220,7 +220,7 @@ def extract_items(nodeset, ucid = nil)
       author = ""
       author_id = ""
     else
-      author = anchor.content
+      author = anchor.content.strip
       author_id = anchor["href"].split("/")[-1]
     end
 
@@ -234,7 +234,7 @@ def extract_items(nodeset, ucid = nil)
     description_html = node.xpath_node(%q(.//div[contains(@class, "yt-lockup-description")]))
     description_html, description = html_to_content(description_html)
 
-    case node.xpath_node(%q(.//div)).not_nil!["class"]
+    case node.xpath_node(%q(.//div[contains(@class, "yt-lockup-tile")])).not_nil!["class"]
     when .includes? "yt-lockup-playlist"
       plid = HTTP::Params.parse(URI.parse(id).query.not_nil!)["list"]
 
@@ -245,10 +245,10 @@ def extract_items(nodeset, ucid = nil)
       video_count ||= 0
 
       videos = [] of SearchPlaylistVideo
-      node.xpath_nodes(%q(.//ol[contains(@class, "yt-lockup-playlist-items")]/li)).each do |video|
+      node.xpath_nodes(%q(.//*[contains(@class, "yt-lockup-playlist-items")]/li)).each do |video|
         anchor = video.xpath_node(%q(.//a))
         if anchor
-          video_title = anchor.content
+          video_title = anchor.content.strip
           id = HTTP::Params.parse(URI.parse(anchor["href"]).query.not_nil!)["v"]
         end
         video_title ||= ""
@@ -276,7 +276,7 @@ def extract_items(nodeset, ucid = nil)
         videos
       )
     when .includes? "yt-lockup-channel"
-      author = title
+      author = title.strip
       ucid = id.split("/")[-1]
 
       author_thumbnail = node.xpath_node(%q(.//div/span/img)).try &.["data-thumb"]?
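The selector changes above are easier to see against a concrete fragment. Below is a minimal sketch using Crystal's XML module; the HTML snippet is invented, and only the class names mirror the ones in the diff. The point is that .//* matches the playlist-items container whatever its tag name, and .strip trims the surrounding whitespace that scraped anchor text tends to carry.

require "xml"

# Made-up fragment: the playlist-items container is not always an <ol>.
html = <<-HTML
  <div class="yt-lockup-tile">
    <ul class="yt-lockup-playlist-items">
      <li><a href="/watch?v=abc123">  First video  </a></li>
    </ul>
  </div>
HTML

node = XML.parse_html(html)
# The tag-agnostic selector from the hunk above still finds the <ul> variant.
node.xpath_nodes(%q(.//*[contains(@class, "yt-lockup-playlist-items")]/li)).each do |li|
  anchor = li.xpath_node(%q(.//a))
  puts anchor.content.strip if anchor # => "First video"
end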