Add http pool to reduce connection overhead
parent a180e5649c
commit 3bd0488c66

5 changed files with 62 additions and 20 deletions
@@ -1,9 +1,11 @@
 import httpclient, asyncdispatch, options, times, strutils, uri
 import packedjson
-import types, tokens, consts, parserutils
+import types, tokens, consts, parserutils, http_pool

 const rl = "x-rate-limit-"

+var pool {.threadvar.}: HttpPool
+
 proc genParams*(pars: openarray[(string, string)] = @[]; cursor="";
                 count="20"; ext=true): seq[(string, string)] =
   result = timelineParams
@@ -18,6 +20,7 @@ proc genParams*(pars: openarray[(string, string)] = @[]; cursor="";

 proc genHeaders*(token: Token = nil): HttpHeaders =
   result = newHttpHeaders({
+    "connection": "keep-alive",
     "authorization": auth,
     "content-type": "application/json",
     "x-guest-token": if token == nil: "" else: token.tok,
@@ -29,14 +32,17 @@ proc genHeaders*(token: Token = nil): HttpHeaders =
   })

 proc fetch*(url: Uri; oldApi=false): Future[JsonNode] {.async.} =
+  once:
+    pool = HttpPool()
+
   var token = await getToken()
   if token.tok.len == 0:
     result = newJNull()

-  var client = newAsyncHttpClient(headers=genHeaders(token))
+  let headers = genHeaders(token)
   try:
     let
-      resp = await client.get($url)
+      resp = pool.use(headers): await c.get($url)
       body = await resp.body

     if body.startsWith('{') or body.startsWith('['):
@@ -54,5 +60,3 @@ proc fetch*(url: Uri; oldApi=false): Future[JsonNode] {.async.} =
   except Exception:
     echo "error: ", url
     result = newJNull()
-  finally:
-    client.close()
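Note on the fetch change above: `once:` initializes the thread-local pool a single time, and `pool.use(headers)` borrows an AsyncHttpClient (injected as `c`), runs the request, and hands the client back to the pool instead of closing it. A rough sketch of what the call amounts to, simplified from the `use` template added below (the real template also retries once on ProtocolError):

# Simplified expansion of `resp = pool.use(headers): await c.get($url)`;
# illustrative only, not the literal template output.
var c: AsyncHttpClient
if pool.conns.len == 0:
  c = newAsyncHttpClient(headers=headers)   # no idle client available
else:
  c = pool.conns.pop()                      # reuse a kept-alive connection
  c.headers = headers
try:
  resp = await c.get($url)
finally:
  pool.release(c)                           # keep it for the next request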
src/http_pool.nim (new file, 37 lines)
@@ -0,0 +1,37 @@
+import asyncdispatch, httpclient
+
+type
+  HttpPool* = ref object
+    conns*: seq[AsyncHttpClient]
+
+var maxConns {.threadvar.}: int
+
+let keepAlive* = newHttpHeaders({
+  "Connection": "Keep-Alive"
+})
+
+proc setMaxHttpConns*(n: int) =
+  maxConns = n
+
+proc release*(pool: HttpPool; client: AsyncHttpClient) =
+  if pool.conns.len >= maxConns:
+    client.close()
+  elif client != nil:
+    pool.conns.insert(client)
+
+template use*(pool: HttpPool; heads: HttpHeaders; body: untyped): untyped =
+  var c {.inject.}: AsyncHttpClient
+
+  if pool.conns.len == 0:
+    c = newAsyncHttpClient(headers=heads)
+  else:
+    c = pool.conns.pop()
+    c.headers = heads
+
+  try:
+    body
+  except ProtocolError:
+    # Twitter closed the connection, retry
+    body
+  finally:
+    pool.release(c)
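A minimal usage sketch for the new module, using the exported `keepAlive` headers; `exampleFetch` and the URL are illustrative and not part of this commit:

import asyncdispatch, httpclient, http_pool

proc exampleFetch(pool: HttpPool; url: string): Future[string] {.async.} =
  # borrow a client from the pool, run the request, release it on exit
  result = pool.use(keepAlive): await c.getContent(url)

when isMainModule:
  setMaxHttpConns(100)          # cap of idle clients kept per thread
  let pool = HttpPool()
  let body = waitFor pool.exampleFetch("https://example.com")
  echo body.len, " bytes"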
@@ -3,7 +3,7 @@ from net import Port

 import jester

-import types, config, prefs, formatters, redis_cache, tokens
+import types, config, prefs, formatters, redis_cache, http_pool, tokens
 import views/[general, about]
 import routes/[
   preferences, timeline, status, media, search, rss, list,
@@ -25,6 +25,7 @@ updateDefaultPrefs(fullCfg)
 setCacheTimes(cfg)
 setHmacKey(cfg.hmacKey)
 setProxyEncoding(cfg.base64Media)
+setMaxHttpConns(100)

 waitFor initRedisPool(cfg)
 stdout.write &"Connected to Redis at {cfg.redisHost}:{cfg.redisPort}\n"
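The `setMaxHttpConns(100)` call above bounds how many idle clients each thread's pool retains; a client released beyond that cap is closed instead of kept. A tiny illustration with hypothetical values (not from the commit):

import httpclient, http_pool

setMaxHttpConns(2)
let pool = HttpPool()
for i in 0 ..< 3:
  pool.release(newAsyncHttpClient())
assert pool.conns.len == 2   # the third client was closed, not pooled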
@@ -114,7 +114,8 @@ proc parseVideo(js: JsonNode): Video =
     views: js{"ext", "mediaStats", "r", "ok", "viewCount"}.getStr,
     available: js{"ext_media_availability", "status"}.getStr == "available",
     title: js{"ext_alt_text"}.getStr,
-    durationMs: js{"duration_millis"}.getInt
+    durationMs: js{"duration_millis"}.getInt,
+    playbackType: mp4
   )

   with title, js{"additional_media_info", "title"}:
@@ -1,7 +1,8 @@
 import asyncdispatch, httpclient, times, sequtils, strutils, json
-import types, agents, consts
+import types, agents, consts, http_pool

 var
+  clientPool {.threadvar.}: HttpPool
   tokenPool {.threadvar.}: seq[Token]
   lastFailed: Time
   minFail = initDuration(seconds=10)
@@ -10,22 +11,20 @@ proc fetchToken(): Future[Token] {.async.} =
   if getTime() - lastFailed < minFail:
     return Token()

-  let
-    headers = newHttpHeaders({
-      "accept": "text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,*/*;q=0.8",
-      "accept-language": "en-US,en;q=0.5",
-      "connection": "keep-alive",
-      "user-agent": getAgent(),
-      "authorization": auth
-    })
-    client = newAsyncHttpClient(headers=headers)
+  let headers = newHttpHeaders({
+    "accept": "text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,*/*;q=0.8",
+    "accept-language": "en-US,en;q=0.5",
+    "connection": "keep-alive",
+    "user-agent": getAgent(),
+    "authorization": auth
+  })

   var
     resp: string
     tok: string

   try:
-    resp = await client.postContent(activate)
+    resp = clientPool.use(headers): await c.postContent(activate)
     tok = parseJson(resp)["guest_token"].getStr

     let time = getTime()
@@ -35,8 +34,6 @@ proc fetchToken(): Future[Token] {.async.} =
     lastFailed = getTime()
     echo "fetching token failed: ", e.msg
     result = Token()
-  finally:
-    client.close()

 proc expired(token: Token): bool {.inline.} =
   const
@@ -74,6 +71,8 @@ proc poolTokens*(amount: int) {.async.} =
     release(await token)

 proc initTokenPool*(cfg: Config) {.async.} =
+  clientPool = HttpPool()
+
   while true:
     if tokenPool.countIt(not it.isLimited) < cfg.minTokens:
       await poolTokens(min(4, cfg.minTokens - tokenPool.len))