Mirror of https://gitea.invidious.io/iv-org/invidious-copy-2022-04-11.git
Use fibers to try to speed up importing of channels
parent 7951d4c8aa
commit c2c224b16f

3 changed files with 39 additions and 42 deletions
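
The commit's speed-up comes from Crystal fibers: the new get_batch_channels helper in the diff below spawns one fiber per channel, caps how many are in flight at max_threads, and collects results over a Channel(String | Nil). Below is a minimal standalone sketch of that pattern, not the commit's exact code: do_work is a hypothetical stand-in for get_channel, and the sketch additionally drains the fibers still in flight before returning.

def batch_process(ids, max_fibers = 10)
  results = Channel(String | Nil).new
  active = 0
  done = [] of String

  ids.each do |id|
    # Throttle: once max_fibers fibers are in flight, wait for one to report back.
    if active >= max_fibers
      if ok = results.receive
        done << ok
      end
      active -= 1
    end

    active += 1
    spawn do
      begin
        do_work(id)        # hypothetical unit of work (get_channel in the commit)
        results.send(id)   # success: report the id back
      rescue ex
        results.send(nil)  # failure: report nil so the caller can skip it
      end
    end
  end

  # Drain whatever is still in flight so no result is lost.
  active.times do
    if ok = results.receive
      done << ok
    end
  end

  done
end

def do_work(id)
  sleep 10.milliseconds # placeholder for a network or database call
end

p batch_process(("a".."j").to_a) # => the ids that succeeded, in completion order
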
@@ -1355,7 +1355,7 @@ get "/subscription_manager" do |env|
   subscriptions = [] of InvidiousChannel
   user.subscriptions.each do |ucid|
     begin
-      subscriptions << get_channel(ucid, PG_DB, false)
+      subscriptions << get_channel(ucid, PG_DB, false, false)
     rescue ex
       next
     end

@@ -1475,14 +1475,7 @@ post "/data_control" do |env|
   end
   user.subscriptions.uniq!

-  user.subscriptions.select! do |ucid|
-    begin
-      get_channel(ucid, PG_DB, false, false)
-      true
-    rescue ex
-      false
-    end
-  end
+  user.subscriptions = get_batch_channels(user.subscriptions, PG_DB, false, false)

   PG_DB.exec("UPDATE users SET subscriptions = $1 WHERE email = $2", user.subscriptions, user.email)
 when "import_freetube"

@@ -1491,14 +1484,7 @@ post "/data_control" do |env|
   end
   user.subscriptions.uniq!

-  user.subscriptions.select! do |ucid|
-    begin
-      get_channel(ucid, PG_DB, false, false)
-      true
-    rescue ex
-      false
-    end
-  end
+  user.subscriptions = get_batch_channels(user.subscriptions, PG_DB, false, false)

   PG_DB.exec("UPDATE users SET subscriptions = $1 WHERE email = $2", user.subscriptions, user.email)
 when "import_newpipe_subscriptions"

@@ -1508,13 +1494,7 @@ post "/data_control" do |env|
   end
   user.subscriptions.uniq!

-  user.subscriptions.each do |ucid|
-    begin
-      get_channel(ucid, PG_DB, false, false)
-    rescue ex
-      next
-    end
-  end
+  user.subscriptions = get_batch_channels(user.subscriptions, PG_DB, false, false)

   PG_DB.exec("UPDATE users SET subscriptions = $1 WHERE email = $2", user.subscriptions, user.email)
 when "import_newpipe"

@@ -1533,14 +1513,7 @@ post "/data_control" do |env|
   user.subscriptions += db.query_all("SELECT url FROM subscriptions", as: String).map { |url| url.lchop("https://www.youtube.com/channel/") }
   user.subscriptions.uniq!

-  user.subscriptions.select! do |ucid|
-    begin
-      get_channel(ucid, PG_DB, false, false)
-      true
-    rescue ex
-      false
-    end
-  end
+  user.subscriptions = get_batch_channels(user.subscriptions, PG_DB, false, false)

   PG_DB.exec("UPDATE users SET subscriptions = $1 WHERE email = $2", user.subscriptions, user.email)


@@ -21,6 +21,33 @@ class ChannelVideo
   })
 end

+def get_batch_channels(channels, db, refresh = false, pull_all_videos = true, max_threads = 10)
+  active_threads = 0
+  active_channel = Channel(String | Nil).new
+
+  final = [] of String
+  channels.map do |ucid|
+    if active_threads >= max_threads
+      if response = active_channel.receive
+        active_threads -= 1
+        final << response
+      end
+    end
+
+    active_threads += 1
+    spawn do
+      begin
+        get_channel(ucid, db, refresh, pull_all_videos)
+        active_channel.send(ucid)
+      rescue ex
+        active_channel.send(nil)
+      end
+    end
+  end
+
+  return final
+end
+
 def get_channel(id, db, refresh = true, pull_all_videos = true)
   client = make_client(YT_URL)


@@ -177,18 +177,15 @@ def fetch_user(sid, headers, db)
   feed = XML.parse_html(feed.body)

   channels = [] of String
-  feed.xpath_nodes(%q(//ul[@id="guide-channels"]/li/a)).each do |channel|
-    if !{"Popular on YouTube", "Music", "Sports", "Gaming"}.includes? channel["title"]
-      channel_id = channel["href"].lstrip("/channel/")
-
-      begin
-        channel = get_channel(channel_id, db, false, false)
-        channels << channel.id
-      rescue ex
-        next
-      end
-    end
-  end
+  channels = feed.xpath_nodes(%q(//ul[@id="guide-channels"]/li/a)).compact_map do |channel|
+    if {"Popular on YouTube", "Music", "Sports", "Gaming"}.includes? channel["title"]
+      nil
+    else
+      channel["href"].lstrip("/channel/")
+    end
+  end
+
+  channels = get_batch_channels(channels, db, false, false)

   email = feed.xpath_node(%q(//a[@class="yt-masthead-picker-header yt-masthead-picker-active-account"]))
   if email
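
The fetch_user hunk above also replaces an each loop that conditionally pushed ids into an array with compact_map, which drops nil results automatically, so the filter and the collection happen in one pass. A small illustration with made-up data, not taken from the commit:

titles = ["Music", "Some Creator", "Gaming", "Another Creator"]

# nil results are discarded; everything else is collected.
kept = titles.compact_map do |title|
  if {"Popular on YouTube", "Music", "Sports", "Gaming"}.includes? title
    nil
  else
    title
  end
end

p kept # => ["Some Creator", "Another Creator"]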