Mirror of https://gitea.invidious.io/iv-org/invidious-copy-2023-06-08.git (synced 2024-08-15 00:53:38 +00:00)

Merge pull request #1600 from jksladjflkjsadflkjsadf/closeclients

Close http clients after use

Commit 82c8f3b556
6 changed files with 30 additions and 11 deletions
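The change applies one pattern throughout: every HTTP client created by make_client is closed once its response has been read, either with an explicit client.close or via a new block overload of make_client (added in the helpers hunk below) that closes the client in an ensure. A minimal, self-contained sketch of both idioms using Crystal's standard HTTP::Client (example.com and the with_client name are placeholders, not code from this repository):

require "http/client"

# Form 1: close the client explicitly once the response has been read.
client = HTTP::Client.new(URI.parse("https://example.com"))
body = client.get("/").body
client.close

# Form 2: a block-based helper that always closes the client,
# even if the request inside the block raises.
def with_client(url : URI, &block)
  client = HTTP::Client.new(url)
  begin
    yield client
  ensure
    client.close
  end
end

body = with_client(URI.parse("https://example.com"), &.get("/").body)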
@@ -2133,14 +2133,13 @@ get "/api/v1/annotations/:id" do |env|
     file = URI.encode_www_form("#{id[0, 3]}/#{id}.xml")

-    client = make_client(ARCHIVE_URL)
-    location = client.get("/download/youtubeannotations_#{index}/#{id[0, 2]}.tar/#{file}")
+    location = make_client(ARCHIVE_URL, &.get("/download/youtubeannotations_#{index}/#{id[0, 2]}.tar/#{file}"))

     if !location.headers["Location"]?
       env.response.status_code = location.status_code
     end

-    response = make_client(URI.parse(location.headers["Location"])).get(location.headers["Location"])
+    response = make_client(URI.parse(location.headers["Location"]), &.get(location.headers["Location"]))

     if response.body.empty?
       env.response.status_code = 404
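For reference, &.get(...) is Crystal's short block syntax, so the calls above are equivalent to passing an explicit block to the make_client overload introduced further down in this diff; the overload closes the client once the block returns. Roughly (same identifiers as the hunk above, shown for illustration only):

location = make_client(ARCHIVE_URL) do |client|
  client.get("/download/youtubeannotations_#{index}/#{id[0, 2]}.tar/#{file}")
end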
@@ -3498,8 +3497,12 @@ get "/videoplayback" do |env|
       location = URI.parse(response.headers["Location"])
       env.response.headers["Access-Control-Allow-Origin"] = "*"

-      host = "#{location.scheme}://#{location.host}"
-      client = make_client(URI.parse(host), region)
+      new_host = "#{location.scheme}://#{location.host}"
+      if new_host != host
+        host = new_host
+        client.close
+        client = make_client(URI.parse(new_host), region)
+      end

       url = "#{location.full_path}&host=#{location.host}#{region ? "&region=#{region}" : ""}"
     else
@@ -3530,7 +3533,6 @@ get "/videoplayback" do |env|
   end

   begin
-    client = make_client(URI.parse(host), region)
     client.get(url, headers) do |response|
       response.headers.each do |key, value|
         if !RESPONSE_HEADERS_BLACKLIST.includes?(key.downcase)
@@ -3571,8 +3573,6 @@ get "/videoplayback" do |env|
     chunk_end = chunk_start + HTTP_CHUNK_SIZE - 1
   end

-  client = make_client(URI.parse(host), region)
-
   # TODO: Record bytes written so we can restart after a chunk fails
   while true
     if !range_end && content_length
@@ -3636,6 +3636,7 @@ get "/videoplayback" do |env|
         if ex.message != "Error reading socket: Connection reset by peer"
           break
         else
+          client.close
           client = make_client(URI.parse(host), region)
         end
       end
@@ -3645,6 +3646,7 @@ get "/videoplayback" do |env|
       first_chunk = false
     end
   end
+  client.close
 end

 get "/ggpht/*" do |env|

@@ -269,6 +269,8 @@ def fetch_reddit_comments(id, sort_by = "confidence")
     raise InfoException.new("Could not fetch comments")
   end

+  client.close
+
   comments = result[1].data.as(RedditListing).children
   return comments, thread
 end

@@ -108,7 +108,9 @@ def filter_proxies(proxies)
     proxy = HTTPProxy.new(proxy_host: proxy[:ip], proxy_port: proxy[:port])
     client.set_proxy(proxy)

-    client.head("/").status_code == 200
+    status_ok = client.head("/").status_code == 200
+    client.close
+    status_ok
   rescue ex
     false
   end
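The intermediate variable matters because in Crystal the value of a block is its last expression: ending the block with client.close would make the proxy filter produce close's return value (Nil for Crystal's HTTP clients) instead of a Bool. Capturing the status check first keeps the previous behaviour (this snippet just annotates the lines from the hunk above):

ok = client.head("/").status_code == 200  # the Bool we actually want the block to yield
client.close                              # returns Nil, must not be the block value
ok                                        # last expression, so the block still yields a Bool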
@@ -132,6 +134,7 @@ def get_nova_proxies(country_code = "US")
   headers["Referer"] = "https://www.proxynova.com/proxy-server-list/country-#{country_code}/"

   response = client.get("/proxy-server-list/country-#{country_code}/", headers)
+  client.close
   document = XML.parse_html(response.body)

   proxies = [] of {ip: String, port: Int32, score: Float64}
@@ -177,6 +180,7 @@ def get_spys_proxies(country_code = "US")
   }

   response = client.post("/free-proxy-list/#{country_code}/", headers, form: body)
+  client.close
   20.times do
     if response.status_code == 200
       break

@@ -101,6 +101,15 @@ def make_client(url : URI, region = nil)
   return client
 end

+def make_client(url : URI, region = nil, &block)
+  client = make_client(url, region)
+  begin
+    yield client
+  ensure
+    client.close
+  end
+end
+
 def decode_length_seconds(string)
   length_seconds = string.gsub(/[^0-9:]/, "").split(":").map &.to_i
   length_seconds = [0] * (3 - length_seconds.size) + length_seconds
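Callers can use the new overload with an explicit block or with the &. shorthand seen elsewhere in this diff; the ensure guarantees the client is closed even if the request raises, and the overload returns whatever the block returns. A usage sketch (the URL and path are placeholders):

# Explicit block form:
body = make_client(URI.parse("https://example.com")) do |client|
  client.get("/robots.txt").body
end

# Shorthand form, equivalent to the block above:
body = make_client(URI.parse("https://example.com"), &.get("/robots.txt").body)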
@@ -361,7 +370,7 @@ def subscribe_pubsub(topic, key, config)
     "hub.secret" => key.to_s,
   }

-  return make_client(PUBSUB_URL).post("/subscribe", form: body)
+  return make_client(PUBSUB_URL, &.post("/subscribe", form: body))
 end

 def parse_range(range)

@@ -91,6 +91,8 @@ class Invidious::Jobs::BypassCaptchaJob < Invidious::Jobs::BaseJob
          },
        }.to_json).body)

+      captcha_client.close
+
       raise response["error"].as_s if response["error"]?
       task_id = response["taskId"].as_i

@@ -427,7 +427,7 @@ def generate_captcha(key, db)
 end

 def generate_text_captcha(key, db)
-  response = make_client(TEXTCAPTCHA_URL).get("/omarroth@protonmail.com.json").body
+  response = make_client(TEXTCAPTCHA_URL, &.get("/omarroth@protonmail.com.json").body)
   response = JSON.parse(response)

   tokens = response["a"].as_a.map do |answer|