diff --git a/nitter.nimble b/nitter.nimble
index 7771b31..e6a1909 100644
--- a/nitter.nimble
+++ b/nitter.nimble
@@ -23,7 +23,7 @@ requires "https://github.com/zedeus/redis#d0a0e6f"
 requires "zippy#ca5989a"
 requires "flatty#e668085"
 requires "jsony#ea811be"
-
+requires "oauth#b8c163b"
 
 
 # Tasks
diff --git a/src/api.nim b/src/api.nim
index c313aa2..d6a4564 100644
--- a/src/api.nim
+++ b/src/api.nim
@@ -33,23 +33,6 @@ proc getGraphUserTweets*(id: string; kind: TimelineKind; after=""): Future[Profi
     js = await fetch(url ? params, apiId)
   result = parseGraphTimeline(js, "user", after)
 
-# proc getTimeline*(id: string; after=""; replies=false): Future[Profile] {.async.} =
-#   if id.len == 0: return
-#   let
-#     ps = genParams({"userId": id, "include_tweet_replies": $replies}, after)
-#     url = oldUserTweets / (id & ".json") ? ps
-#   result = parseTimeline(await fetch(url, Api.timeline), after)
-
-proc getUserTimeline*(id: string; after=""): Future[Profile] {.async.} =
-  var ps = genParams({"id": id})
-  if after.len > 0:
-    ps.add ("down_cursor", after)
-
-  let
-    url = legacyUserTweets ? ps
-    js = await fetch(url, Api.userTimeline)
-  result = parseUserTimeline(js, after)
-
 proc getGraphListTweets*(id: string; after=""): Future[Timeline] {.async.} =
   if id.len == 0: return
   let
@@ -112,10 +95,10 @@ proc getTweet*(id: string; after=""): Future[Conversation] {.async.} =
   if after.len > 0:
     result.replies = await getReplies(id, after)
 
-proc getGraphSearch*(query: Query; after=""): Future[Profile] {.async.} =
+proc getGraphTweetSearch*(query: Query; after=""): Future[Timeline] {.async.} =
   let q = genQueryParam(query)
   if q.len == 0 or q == emptyQuery:
-    return Profile(tweets: Timeline(query: query, beginning: true))
+    return Timeline(query: query, beginning: true)
 
   var
     variables = %*{
@@ -129,44 +112,29 @@ proc getGraphSearch*(query: Query; after=""): Future[Profile] {.async.} =
   if after.len > 0:
     variables["cursor"] = % after
   let url = graphSearchTimeline ? {"variables": $variables, "features": gqlFeatures}
-  result = Profile(tweets: parseGraphSearch(await fetch(url, Api.search), after))
-  result.tweets.query = query
-
-proc getTweetSearch*(query: Query; after=""): Future[Timeline] {.async.} =
-  var q = genQueryParam(query)
-
-  if q.len == 0 or q == emptyQuery:
-    return Timeline(query: query, beginning: true)
-
-  if after.len > 0:
-    q &= " max_id:" & after
-
-  let url = tweetSearch ? genParams({
-    "q": q ,
-    "modules": "status",
-    "result_type": "recent",
-  })
-
-  result = parseTweetSearch(await fetch(url, Api.search), after)
+  result = parseGraphSearch[Tweets](await fetch(url, Api.search), after)
   result.query = query
 
-proc getUserSearch*(query: Query; page="1"): Future[Result[User]] {.async.} =
+proc getGraphUserSearch*(query: Query; after=""): Future[Result[User]] {.async.} =
   if query.text.len == 0:
     return Result[User](query: query, beginning: true)
 
-  var url = userSearch ? {
-    "q": query.text,
-    "skip_status": "1",
-    "count": "20",
-    "page": page
-  }
+  var
+    variables = %*{
+      "rawQuery": query.text,
+      "count": 20,
+      "product": "People",
+      "withDownvotePerspective": false,
+      "withReactionsMetadata": false,
+      "withReactionsPerspective": false
+    }
+  if after.len > 0:
+    variables["cursor"] = % after
+    result.beginning = false
 
-  result = parseUsers(await fetchRaw(url, Api.userSearch))
+  let url = graphSearchTimeline ? {"variables": $variables, "features": gqlFeatures}
+  result = parseGraphSearch[User](await fetch(url, Api.search), after)
   result.query = query
-  if page.len == 0:
-    result.bottom = "2"
-  elif page.allCharsInSet(Digits):
-    result.bottom = $(parseInt(page) + 1)
 
 proc getPhotoRail*(name: string): Future[PhotoRail] {.async.} =
   if name.len == 0: return
diff --git a/src/apiutils.nim b/src/apiutils.nim
index 1da971a..d1ecfa3 100644
--- a/src/apiutils.nim
+++ b/src/apiutils.nim
@@ -1,6 +1,6 @@
 # SPDX-License-Identifier: AGPL-3.0-only
-import httpclient, asyncdispatch, options, strutils, uri
-import jsony, packedjson, zippy
+import httpclient, asyncdispatch, options, strutils, uri, times, math
+import jsony, packedjson, zippy, oauth1
 import types, tokens, consts, parserutils, http_pool
 import experimental/types/common
 
@@ -29,12 +29,30 @@ proc genParams*(pars: openArray[(string, string)] = @[]; cursor="";
     else:
       result &= ("cursor", cursor)
 
-proc genHeaders*(token: Token = nil): HttpHeaders =
+proc getOauthHeader(url, oauthToken, oauthTokenSecret: string): string =
+  let
+    encodedUrl = url.replace(",", "%2C").replace("+", "%20")
+    params = OAuth1Parameters(
+      consumerKey: consumerKey,
+      signatureMethod: "HMAC-SHA1",
+      timestamp: $int(round(epochTime())),
+      nonce: "0",
+      isIncludeVersionToHeader: true,
+      token: oauthToken
+    )
+    signature = getSignature(HttpGet, encodedUrl, "", params, consumerSecret, oauthTokenSecret)
+
+  params.signature = percentEncode(signature)
+
+  return getOauth1RequestHeader(params)["authorization"]
+
+proc genHeaders*(url, oauthToken, oauthTokenSecret: string): HttpHeaders =
+  let header = getOauthHeader(url, oauthToken, oauthTokenSecret)
+
   result = newHttpHeaders({
     "connection": "keep-alive",
-    "authorization": auth,
+    "authorization": header,
     "content-type": "application/json",
-    "x-guest-token": if token == nil: "" else: token.tok,
     "x-twitter-active-user": "yes",
     "authority": "api.twitter.com",
     "accept-encoding": "gzip",
@@ -43,24 +61,24 @@ proc genHeaders*(token: Token = nil): HttpHeaders =
     "DNT": "1"
   })
 
-template updateToken() =
+template updateAccount() =
   if resp.headers.hasKey(rlRemaining):
     let
       remaining = parseInt(resp.headers[rlRemaining])
       reset = parseInt(resp.headers[rlReset])
-    token.setRateLimit(api, remaining, reset)
+    account.setRateLimit(api, remaining, reset)
 
 template fetchImpl(result, fetchBody) {.dirty.} =
   once:
     pool = HttpPool()
 
-  var token = await getToken(api)
-  if token.tok.len == 0:
+  var account = await getGuestAccount(api)
+  if account.oauthToken.len == 0:
     raise rateLimitError()
 
   try:
     var resp: AsyncResponse
-    pool.use(genHeaders(token)):
+    pool.use(genHeaders($url, account.oauthToken, account.oauthSecret)):
       template getContent =
         resp = await c.get($url)
         result = await resp.body
@@ -79,19 +97,19 @@ template fetchImpl(result, fetchBody) {.dirty.} =
 
     fetchBody
 
-    release(token, used=true)
+    release(account, used=true)
 
     if resp.status == $Http400:
       raise newException(InternalError, $url)
   except InternalError as e:
     raise e
   except BadClientError as e:
-    release(token, used=true)
+    release(account, used=true)
     raise e
   except Exception as e:
-    echo "error: ", e.name, ", msg: ", e.msg, ", token: ", token[], ", url: ", url
+    echo "error: ", e.name, ", msg: ", e.msg, ", accountId: ", account.id, ", url: ", url
     if "length" notin e.msg and "descriptor" notin e.msg:
-      release(token, invalid=true)
+      release(account, invalid=true)
     raise rateLimitError()
 
 proc fetch*(url: Uri; api: Api): Future[JsonNode] {.async.} =
@@ -103,12 +121,12 @@ proc fetch*(url: Uri; api: Api): Future[JsonNode] {.async.} =
       echo resp.status, ": ", body, " --- url: ", url
       result = newJNull()
 
-    updateToken()
+    updateAccount()
 
     let error = result.getError
     if error in {invalidToken, badToken}:
       echo "fetch error: ", result.getError
-      release(token, invalid=true)
+      release(account, invalid=true)
       raise rateLimitError()
 
 proc fetchRaw*(url: Uri; api: Api): Future[string] {.async.} =
@@ -117,11 +135,11 @@ proc fetchRaw*(url: Uri; api: Api): Future[string] {.async.} =
       echo resp.status, ": ", result, " --- url: ", url
       result.setLen(0)
 
-    updateToken()
+    updateAccount()
 
     if result.startsWith("{\"errors"):
       let errors = result.fromJson(Errors)
       if errors in {invalidToken, badToken}:
         echo "fetch error: ", errors
-        release(token, invalid=true)
+        release(account, invalid=true)
         raise rateLimitError()
diff --git a/src/consts.nim b/src/consts.nim
index a25f6ea..2cfd1ed 100644
--- a/src/consts.nim
+++ b/src/consts.nim
@@ -2,17 +2,13 @@
 import uri, sequtils, strutils
 
 const
-  auth* = "Bearer AAAAAAAAAAAAAAAAAAAAAFQODgEAAAAAVHTp76lzh3rFzcHbmHVvQxYYpTw%3DckAlMINMjmCwxUcaXbAN4XqJVdgMJaHqNOFgPMK0zN1qLqLQCF"
+  consumerKey* = "3nVuSoBZnx6U4vzUxf5w"
+  consumerSecret* = "Bcs59EFbbsdF6Sl9Ng71smgStWEGwXXKSjYvPVt7qys"
 
   api = parseUri("https://api.twitter.com")
   activate* = $(api / "1.1/guest/activate.json")
 
-  legacyUserTweets* = api / "1.1/timeline/user.json"
   photoRail* = api / "1.1/statuses/media_timeline.json"
-  userSearch* = api / "1.1/users/search.json"
-  tweetSearch* = api / "1.1/search/universal.json"
-
-  # oldUserTweets* = api / "2/timeline/profile"
 
   graphql = api / "graphql"
   graphUser* = graphql / "u7wQyGi6oExe8_TRWGMq4Q/UserResultByScreenNameQuery"
diff --git a/src/nitter.nim b/src/nitter.nim
index 25a569d..4a4ec13 100644
--- a/src/nitter.nim
+++ b/src/nitter.nim
@@ -3,6 +3,7 @@ import asyncdispatch, strformat, logging
 from net import Port
 from htmlgen import a
 from os import getEnv
+from json import parseJson
 
 import jester
 
@@ -15,8 +16,14 @@ import routes/[
 const instancesUrl = "https://github.com/zedeus/nitter/wiki/Instances"
 const issuesUrl = "https://github.com/zedeus/nitter/issues"
 
-let configPath = getEnv("NITTER_CONF_FILE", "./nitter.conf")
-let (cfg, fullCfg) = getConfig(configPath)
+let
+  configPath = getEnv("NITTER_CONF_FILE", "./nitter.conf")
+  (cfg, fullCfg) = getConfig(configPath)
+
+  accountsPath = getEnv("NITTER_ACCOUNTS_FILE", "./guest_accounts.json")
+  accounts = parseJson(readFile(accountsPath))
+
+initAccountPool(cfg, parseJson(readFile(accountsPath)))
 
 if not cfg.enableDebug:
   # Silence Jester's query warning
@@ -38,8 +45,6 @@ waitFor initRedisPool(cfg)
 stdout.write &"Connected to Redis at {cfg.redisHost}:{cfg.redisPort}\n"
 stdout.flushFile
 
-asyncCheck initTokenPool(cfg)
-
 createUnsupportedRouter(cfg)
 createResolverRouter(cfg)
 createPrefRouter(cfg)
diff --git a/src/parser.nim b/src/parser.nim
index c7d8bd1..9262b28 100644
--- a/src/parser.nim
+++ b/src/parser.nim
@@ -29,7 +29,9 @@ proc parseUser(js: JsonNode; id=""): User =
   result.expandUserEntities(js)
 
 proc parseGraphUser(js: JsonNode): User =
-  let user = ? js{"user_result", "result"}
+  var user = js{"user_result", "result"}
+  if user.isNull:
+    user = ? js{"user_results", "result"}
   result = parseUser(user{"legacy"})
 
   if "is_blue_verified" in user:
@@ -287,169 +289,6 @@ proc parseTweet(js: JsonNode; jsCard: JsonNode = newJNull()): Tweet =
     result.text.removeSuffix(" Learn more.")
     result.available = false
 
-proc parseLegacyTweet(js: JsonNode): Tweet =
-  result = parseTweet(js, js{"card"})
-  if not result.isNil and result.available:
-    result.user = parseUser(js{"user"})
-
-    if result.quote.isSome:
-      result.quote = some parseLegacyTweet(js{"quoted_status"})
-
-proc parseTweetSearch*(js: JsonNode; after=""): Timeline =
-  result.beginning = after.len == 0
-
-  if js.kind == JNull or "modules" notin js or js{"modules"}.len == 0:
-    return
-
-  for item in js{"modules"}:
-    with tweet, item{"status", "data"}:
-      let parsed = parseLegacyTweet(tweet)
-
-      if parsed.retweet.isSome:
-        parsed.retweet = some parseLegacyTweet(tweet{"retweeted_status"})
-
-      result.content.add @[parsed]
-
-  if result.content.len > 0:
-    result.bottom = $(result.content[^1][0].id - 1)
-
-proc parseUserTimelineTweet(tweet: JsonNode; users: TableRef[string, User]): Tweet =
-  result = parseTweet(tweet, tweet{"card"})
-
-  if result.isNil or not result.available:
-    return
-
-  with user, tweet{"user"}:
-    let userId = user{"id_str"}.getStr
-    if user{"ext_is_blue_verified"}.getBool(false):
-      users[userId].verified = users[userId].verified or true
-    result.user = users[userId]
-
-proc parseUserTimeline*(js: JsonNode; after=""): Profile =
-  result = Profile(tweets: Timeline(beginning: after.len == 0))
-
-  if js.kind == JNull or "response" notin js or "twitter_objects" notin js:
-    return
-
-  var users = newTable[string, User]()
-  for userId, user in js{"twitter_objects", "users"}:
-    users[userId] = parseUser(user)
-
-  for entity in js{"response", "timeline"}:
-    let
-      tweetId = entity{"tweet", "id"}.getId
-      isPinned = entity{"tweet", "is_pinned"}.getBool(false)
-
-    with tweet, js{"twitter_objects", "tweets", $tweetId}:
-      var parsed = parseUserTimelineTweet(tweet, users)
-
-      if not parsed.isNil and parsed.available:
-        if parsed.quote.isSome:
-          parsed.quote = some parseUserTimelineTweet(tweet{"quoted_status"}, users)
-
-        if parsed.retweet.isSome:
-          let retweet = parseUserTimelineTweet(tweet{"retweeted_status"}, users)
-          if retweet.quote.isSome:
-            retweet.quote = some parseUserTimelineTweet(tweet{"retweeted_status", "quoted_status"}, users)
-          parsed.retweet = some retweet
-
-        if isPinned:
-          parsed.pinned = true
-          result.pinned = some parsed
-        else:
-          result.tweets.content.add parsed
-
-  result.tweets.bottom = js{"response", "cursor", "bottom"}.getStr
-
-# proc finalizeTweet(global: GlobalObjects; id: string): Tweet =
-#   let intId = if id.len > 0: parseBiggestInt(id) else: 0
-#   result = global.tweets.getOrDefault(id, Tweet(id: intId))
-
-#   if result.quote.isSome:
-#     let quote = get(result.quote).id
-#     if $quote in global.tweets:
-#       result.quote = some global.tweets[$quote]
-#     else:
-#       result.quote = some Tweet()
-
-#   if result.retweet.isSome:
-#     let rt = get(result.retweet).id
-#     if $rt in global.tweets:
-#       result.retweet = some finalizeTweet(global, $rt)
-#     else:
-#       result.retweet = some Tweet()
-
-# proc parsePin(js: JsonNode; global: GlobalObjects): Tweet =
-#   let pin = js{"pinEntry", "entry", "entryId"}.getStr
-#   if pin.len == 0: return
-
-#   let id = pin.getId
-#   if id notin global.tweets: return
-
-#   global.tweets[id].pinned = true
-#   return finalizeTweet(global, id)
-
-# proc parseGlobalObjects(js: JsonNode): GlobalObjects =
-#   result = GlobalObjects()
-#   let
-#     tweets = ? js{"globalObjects", "tweets"}
-#     users = ? js{"globalObjects", "users"}
-
-#   for k, v in users:
-#     result.users[k] = parseUser(v, k)
-
-#   for k, v in tweets:
-#     var tweet = parseTweet(v, v{"card"})
-#     if tweet.user.id in result.users:
-#       tweet.user = result.users[tweet.user.id]
-#     result.tweets[k] = tweet
-
-# proc parseInstructions(res: var Profile; global: GlobalObjects; js: JsonNode) =
-#   if js.kind != JArray or js.len == 0:
-#     return
-
-#   for i in js:
-#     if res.tweets.beginning and i{"pinEntry"}.notNull:
-#       with pin, parsePin(i, global):
-#         res.pinned = some pin
-
-#     with r, i{"replaceEntry", "entry"}:
-#       if "top" in r{"entryId"}.getStr:
-#         res.tweets.top = r.getCursor
-#       elif "bottom" in r{"entryId"}.getStr:
-#         res.tweets.bottom = r.getCursor
-
-# proc parseTimeline*(js: JsonNode; after=""): Profile =
-#   result = Profile(tweets: Timeline(beginning: after.len == 0))
-#   let global = parseGlobalObjects(? js)
-
-#   let instructions = ? js{"timeline", "instructions"}
-#   if instructions.len == 0: return
-
-#   result.parseInstructions(global, instructions)
-
-#   var entries: JsonNode
-#   for i in instructions:
-#     if "addEntries" in i:
-#       entries = i{"addEntries", "entries"}
-
-#   for e in ? entries:
-#     let entry = e{"entryId"}.getStr
-#     if "tweet" in entry or entry.startsWith("sq-I-t") or "tombstone" in entry:
-#       let tweet = finalizeTweet(global, e.getEntryId)
-#       if not tweet.available: continue
-#       result.tweets.content.add tweet
-#     elif "cursor-top" in entry:
-#       result.tweets.top = e.getCursor
-#     elif "cursor-bottom" in entry:
-#       result.tweets.bottom = e.getCursor
-#     elif entry.startsWith("sq-cursor"):
-#       with cursor, e{"content", "operation", "cursor"}:
-#         if cursor{"cursorType"}.getStr == "Bottom":
-#           result.tweets.bottom = cursor{"value"}.getStr
-#         else:
-#           result.tweets.top = cursor{"value"}.getStr
-
 proc parsePhotoRail*(js: JsonNode): PhotoRail =
   with error, js{"error"}:
     if error.getStr == "Not authorized.":
@@ -597,8 +436,8 @@ proc parseGraphTimeline*(js: JsonNode; root: string; after=""): Profile =
           tweet.id = parseBiggestInt(entryId)
         result.pinned = some tweet
 
-proc parseGraphSearch*(js: JsonNode; after=""): Timeline =
-  result = Timeline(beginning: after.len == 0)
+proc parseGraphSearch*[T: User | Tweets](js: JsonNode; after=""): Result[T] =
+  result = Result[T](beginning: after.len == 0)
 
   let instructions = js{"data", "search_by_raw_query", "search_timeline", "timeline", "instructions"}
   if instructions.len == 0:
@@ -607,15 +446,21 @@
   for instruction in instructions:
     let typ = instruction{"type"}.getStr
     if typ == "TimelineAddEntries":
-      for e in instructions[0]{"entries"}:
+      for e in instruction{"entries"}:
         let entryId = e{"entryId"}.getStr
-        if entryId.startsWith("tweet"):
-          with tweetResult, e{"content", "itemContent", "tweet_results", "result"}:
-            let tweet = parseGraphTweet(tweetResult)
-            if not tweet.available:
-              tweet.id = parseBiggestInt(entryId.getId())
-            result.content.add tweet
-        elif entryId.startsWith("cursor-bottom"):
+        when T is Tweets:
+          if entryId.startsWith("tweet"):
+            with tweetRes, e{"content", "itemContent", "tweet_results", "result"}:
+              let tweet = parseGraphTweet(tweetRes)
+              if not tweet.available:
+                tweet.id = parseBiggestInt(entryId.getId())
+              result.content.add tweet
+        elif T is User:
+          if entryId.startsWith("user"):
+            with userRes, e{"content", "itemContent"}:
+              result.content.add parseGraphUser(userRes)
+
+        if entryId.startsWith("cursor-bottom"):
           result.bottom = e{"content", "value"}.getStr
     elif typ == "TimelineReplaceEntry":
       if instruction{"entry_id_to_replace"}.getStr.startsWith("cursor-bottom"):
diff --git a/src/redis_cache.nim b/src/redis_cache.nim
index 89161be..2387a42 100644
--- a/src/redis_cache.nim
+++ b/src/redis_cache.nim
@@ -147,15 +147,15 @@ proc getCachedUsername*(userId: string): Future[string] {.async.} =
     if result.len > 0 and user.id.len > 0:
       await all(cacheUserId(result, user.id), cache(user))
 
-proc getCachedTweet*(id: int64): Future[Tweet] {.async.} =
-  if id == 0: return
-  let tweet = await get(id.tweetKey)
-  if tweet != redisNil:
-    tweet.deserialize(Tweet)
-  else:
-    result = await getGraphTweetResult($id)
-    if not result.isNil:
-      await cache(result)
+# proc getCachedTweet*(id: int64): Future[Tweet] {.async.} =
+#   if id == 0: return
+#   let tweet = await get(id.tweetKey)
+#   if tweet != redisNil:
+#     tweet.deserialize(Tweet)
+#   else:
+#     result = await getGraphTweetResult($id)
+#     if not result.isNil:
+#       await cache(result)
 
 proc getCachedPhotoRail*(name: string): Future[PhotoRail] {.async.} =
   if name.len == 0: return
diff --git a/src/routes/rss.nim b/src/routes/rss.nim
index d378396..6c77992 100644
--- a/src/routes/rss.nim
+++ b/src/routes/rss.nim
@@ -27,7 +27,7 @@ proc timelineRss*(req: Request; cfg: Config; query: Query): Future[Rss] {.async.
   else:
     var q = query
     q.fromUser = names
-    profile.tweets = await getTweetSearch(q, after)
+    profile.tweets = await getGraphTweetSearch(q, after)
     # this is kinda dumb
     profile.user = User(
       username: name,
@@ -76,7 +76,7 @@ proc createRssRouter*(cfg: Config) =
       if rss.cursor.len > 0:
         respRss(rss, "Search")
 
-      let tweets = await getTweetSearch(query, cursor)
+      let tweets = await getGraphTweetSearch(query, cursor)
       rss.cursor = tweets.bottom
       rss.feed = renderSearchRss(tweets.content, query.text, genQueryUrl(query), cfg)
 
diff --git a/src/routes/search.nim b/src/routes/search.nim
index c270df5..e9f991d 100644
--- a/src/routes/search.nim
+++ b/src/routes/search.nim
@@ -29,13 +29,13 @@ proc createSearchRouter*(cfg: Config) =
         redirect("/" & q)
       var users: Result[User]
       try:
-        users = await getUserSearch(query, getCursor())
+        users = await getGraphUserSearch(query, getCursor())
       except InternalError:
         users = Result[User](beginning: true, query: query)
       resp renderMain(renderUserSearch(users, prefs), request, cfg, prefs, title)
     of tweets:
       let
-        tweets = await getTweetSearch(query, getCursor())
+        tweets = await getGraphTweetSearch(query, getCursor())
         rss = "/search/rss?" & genQueryUrl(query)
       resp renderMain(renderTweetSearch(tweets, prefs, getPath()),
                       request, cfg, prefs, title, rss=rss)
diff --git a/src/routes/timeline.nim b/src/routes/timeline.nim
index 8d02b68..3568ab7 100644
--- a/src/routes/timeline.nim
+++ b/src/routes/timeline.nim
@@ -53,10 +53,10 @@ proc fetchProfile*(after: string; query: Query; skipRail=false;
 
   result =
     case query.kind
-    of posts: await getUserTimeline(userId, after)
+    of posts: await getGraphUserTweets(userId, TimelineKind.tweets, after)
     of replies: await getGraphUserTweets(userId, TimelineKind.replies, after)
     of media: await getGraphUserTweets(userId, TimelineKind.media, after)
-    else: Profile(tweets: await getTweetSearch(query, after))
+    else: Profile(tweets: await getGraphTweetSearch(query, after))
 
   result.user = await user
   result.photoRail = await rail
@@ -67,7 +67,7 @@ proc showTimeline*(request: Request; query: Query; cfg: Config; prefs: Prefs;
                    rss, after: string): Future[string] {.async.} =
   if query.fromUser.len != 1:
     let
-      timeline = await getTweetSearch(query, after)
+      timeline = await getGraphTweetSearch(query, after)
       html = renderTweetSearch(timeline, prefs, getPath())
     return renderMain(html, request, cfg, prefs, "Multi", rss=rss)
 
@@ -122,7 +122,7 @@ proc createTimelineRouter*(cfg: Config) =
       # used for the infinite scroll feature
      if @"scroll".len > 0:
        if query.fromUser.len != 1:
-          var timeline = await getTweetSearch(query, after)
+          var timeline = await getGraphTweetSearch(query, after)
          if timeline.content.len == 0: resp Http404
          timeline.beginning = true
          resp $renderTweetSearch(timeline, prefs, getPath())
diff --git a/src/tokens.nim b/src/tokens.nim
index decf228..71a7abd 100644
--- a/src/tokens.nim
+++ b/src/tokens.nim
@@ -1,23 +1,16 @@
 # SPDX-License-Identifier: AGPL-3.0-only
-import asyncdispatch, httpclient, times, sequtils, json, random
-import strutils, tables
-import types, consts
+import asyncdispatch, times, json, random, strutils, tables
+import types
 
-const
-  maxConcurrentReqs = 5  # max requests at a time per token, to avoid race conditions
-  maxLastUse = 1.hours   # if a token is unused for 60 minutes, it expires
-  maxAge = 2.hours + 55.minutes  # tokens expire after 3 hours
-  failDelay = initDuration(minutes=30)
+# max requests at a time per account to avoid race conditions
+const maxConcurrentReqs = 5
 
 var
-  tokenPool: seq[Token]
-  lastFailed: Time
+  accountPool: seq[GuestAccount]
   enableLogging = false
 
-let headers = newHttpHeaders({"authorization": auth})
-
 template log(str) =
-  if enableLogging: echo "[tokens] ", str
+  if enableLogging: echo "[accounts] ", str
 
 proc getPoolJson*(): JsonNode =
   var
@@ -26,141 +19,94 @@
     totalReqs = 0
     totalPending = 0
     reqsPerApi: Table[string, int]
-  for token in tokenPool:
-    totalPending.inc(token.pending)
-    list[token.tok] = %*{
+  for account in accountPool:
+    totalPending.inc(account.pending)
+    list[account.id] = %*{
       "apis": newJObject(),
-      "pending": token.pending,
-      "init": $token.init,
-      "lastUse": $token.lastUse
+      "pending": account.pending,
     }
 
-    for api in token.apis.keys:
-      list[token.tok]["apis"][$api] = %token.apis[api]
+    for api in account.apis.keys:
+      list[account.id]["apis"][$api] = %account.apis[api].remaining
 
       let
        maxReqs =
          case api
-          of Api.search: 100000
+          of Api.search: 50
          of Api.photoRail: 180
-          of Api.timeline: 187
-          of Api.userTweets, Api.userTimeline: 300
-          of Api.userTweetsAndReplies, Api.userRestId,
-             Api.userScreenName, Api.tweetDetail, Api.tweetResult,
-             Api.list, Api.listTweets, Api.listMembers, Api.listBySlug, Api.userMedia: 500
-          of Api.userSearch: 900
-        reqs = maxReqs - token.apis[api].remaining
+          of Api.userTweets, Api.userTweetsAndReplies, Api.userMedia,
+             Api.userRestId, Api.userScreenName,
+             Api.tweetDetail, Api.tweetResult,
+             Api.list, Api.listTweets, Api.listMembers, Api.listBySlug: 500
+        reqs = maxReqs - account.apis[api].remaining
 
       reqsPerApi[$api] = reqsPerApi.getOrDefault($api, 0) + reqs
      totalReqs.inc(reqs)
 
  return %*{
-    "amount": tokenPool.len,
+    "amount": accountPool.len,
    "requests": totalReqs,
    "pending": totalPending,
    "apis": reqsPerApi,
-    "tokens": list
+    "accounts": list
  }
 
 proc rateLimitError*(): ref RateLimitError =
  newException(RateLimitError, "rate limited")
 
-proc fetchToken(): Future[Token] {.async.} =
-  if getTime() - lastFailed < failDelay:
-    raise rateLimitError()
-
-  let client = newAsyncHttpClient(headers=headers)
-
-  try:
-    let
-      resp = await client.postContent(activate)
-      tokNode = parseJson(resp)["guest_token"]
-      tok = tokNode.getStr($(tokNode.getInt))
-      time = getTime()
-
-    return Token(tok: tok, init: time, lastUse: time)
-  except Exception as e:
-    echo "[tokens] fetching token failed: ", e.msg
-    if "Try again" notin e.msg:
-      echo "[tokens] fetching tokens paused, resuming in 30 minutes"
-      lastFailed = getTime()
-  finally:
-    client.close()
-
-proc expired(token: Token): bool =
-  let time = getTime()
-  token.init < time - maxAge or token.lastUse < time - maxLastUse
-
-proc isLimited(token: Token; api: Api): bool =
-  if token.isNil or token.expired:
+proc isLimited(account: GuestAccount; api: Api): bool =
+  if account.isNil:
    return true
 
-  if api in token.apis:
-    let limit = token.apis[api]
+  if api in account.apis:
+    let limit = account.apis[api]
    return (limit.remaining <= 10 and limit.reset > epochTime().int)
  else:
    return false
 
-proc isReady(token: Token; api: Api): bool =
-  not (token.isNil or token.pending > maxConcurrentReqs or token.isLimited(api))
+proc isReady(account: GuestAccount; api: Api): bool =
+  not (account.isNil or account.pending > maxConcurrentReqs or account.isLimited(api))
 
-proc release*(token: Token; used=false; invalid=false) =
-  if token.isNil: return
-  if invalid or token.expired:
-    if invalid: log "discarding invalid token"
-    elif token.expired: log "discarding expired token"
+proc release*(account: GuestAccount; used=false; invalid=false) =
+  if account.isNil: return
+  if invalid:
+    log "discarding invalid account: " & account.id
 
-    let idx = tokenPool.find(token)
-    if idx > -1: tokenPool.delete(idx)
+    let idx = accountPool.find(account)
+    if idx > -1: accountPool.delete(idx)
  elif used:
-    dec token.pending
-    token.lastUse = getTime()
+    dec account.pending
 
-proc getToken*(api: Api): Future[Token] {.async.} =
-  for i in 0 ..< tokenPool.len:
+proc getGuestAccount*(api: Api): Future[GuestAccount] {.async.} =
+  for i in 0 ..< accountPool.len:
    if result.isReady(api): break
    release(result)
-    result = tokenPool.sample()
+    result = accountPool.sample()
 
-  if not result.isReady(api):
-    release(result)
-    result = await fetchToken()
-    log "added new token to pool"
-    tokenPool.add result
-
-  if not result.isNil:
+  if not result.isNil and result.isReady(api):
    inc result.pending
  else:
+    log "no accounts available for API: " & $api
    raise rateLimitError()
 
-proc setRateLimit*(token: Token; api: Api; remaining, reset: int) =
+proc setRateLimit*(account: GuestAccount; api: Api; remaining, reset: int) =
  # avoid undefined behavior in race conditions
-  if api in token.apis:
-    let limit = token.apis[api]
+  if api in account.apis:
+    let limit = account.apis[api]
    if limit.reset >= reset and limit.remaining < remaining:
      return
+    if limit.reset == reset and limit.remaining >= remaining:
+      account.apis[api].remaining = remaining
+      return
 
-  token.apis[api] = RateLimit(remaining: remaining, reset: reset)
+  account.apis[api] = RateLimit(remaining: remaining, reset: reset)
 
-proc poolTokens*(amount: int) {.async.} =
-  var futs: seq[Future[Token]]
-  for i in 0 ..< amount:
-    futs.add fetchToken()
-
-  for token in futs:
-    var newToken: Token
-
-    try: newToken = await token
-    except: discard
-
-    if not newToken.isNil:
-      log "added new token to pool"
-      tokenPool.add newToken
-
-proc initTokenPool*(cfg: Config) {.async.} =
+proc initAccountPool*(cfg: Config; accounts: JsonNode) =
  enableLogging = cfg.enableDebug
 
-  while true:
-    if tokenPool.countIt(not it.isLimited(Api.userTimeline)) < cfg.minTokens:
-      await poolTokens(min(4, cfg.minTokens - tokenPool.len))
-    await sleepAsync(2000)
+  for account in accounts:
+    accountPool.add GuestAccount(
+      id: account{"user", "id_str"}.getStr,
+      oauthToken: account{"oauth_token"}.getStr,
+      oauthSecret: account{"oauth_token_secret"}.getStr,
+    )
diff --git a/src/types.nim b/src/types.nim
index 1a47d25..2a553dd 100644
--- a/src/types.nim
+++ b/src/types.nim
@@ -17,11 +17,8 @@ type
   Api* {.pure.} = enum
     tweetDetail
     tweetResult
-    timeline
-    userTimeline
     photoRail
     search
-    userSearch
     list
     listBySlug
     listMembers
@@ -36,9 +33,11 @@
     remaining*: int
     reset*: int
 
-  Token* = ref object
-    tok*: string
-    init*: Time
+  GuestAccount* = ref object
+    id*: string
+    oauthToken*: string
+    oauthSecret*: string
+    # init*: Time
     lastUse*: Time
     pending*: int
     apis*: Table[Api, RateLimit]
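Note (illustrative, not part of the patch): initAccountPool reads the file named by NITTER_ACCOUNTS_FILE (default ./guest_accounts.json) with parseJson and iterates it as a JSON array of guest-account records, using only the user.id_str, oauth_token, and oauth_token_secret paths of each entry. A minimal sketch of such a file, with placeholder values:

[
  {
    "user": { "id_str": "1234567890" },
    "oauth_token": "1234567890-placeholder",
    "oauth_token_secret": "placeholder"
  }
]

Files produced by existing guest-account exporters may carry additional fields; anything outside the three paths above is ignored by initAccountPool. The path can be overridden at startup, e.g. NITTER_ACCOUNTS_FILE=/path/to/guest_accounts.json ./nitter.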