Merge remote-tracking branch 'upstream/guest_accounts'
commit f290b7b5e7
18 changed files with 233 additions and 270 deletions
Dockerfile
@@ -1,4 +1,4 @@
-FROM nimlang/nim:1.6.10-alpine-regular as nim
+FROM nimlang/nim:2.0.0-alpine-regular as nim
 LABEL maintainer="setenforce@protonmail.com"

 RUN apk --no-cache add libsass-dev pcre
nitter.nimble
@@ -23,7 +23,7 @@ requires "https://github.com/zedeus/redis#d0a0e6f"
 requires "zippy#ca5989a"
 requires "flatty#e668085"
 requires "jsony#ea811be"
+requires "oauth#b8c163b"

 # Tasks
public/js/infiniteScroll.js
@@ -5,7 +5,7 @@ function insertBeforeLast(node, elem) {
 }

 function getLoadMore(doc) {
-  return doc.querySelector('.show-more:not(.timeline-item)');
+  return doc.querySelector(".show-more:not(.timeline-item)");
 }

 function isDuplicate(item, itemClass) {
@@ -15,18 +15,19 @@ function isDuplicate(item, itemClass) {
   return document.querySelector(itemClass + " .tweet-link[href='" + href + "']") != null;
 }

-window.onload = function() {
+window.onload = function () {
   const url = window.location.pathname;
   const isTweet = url.indexOf("/status/") !== -1;
   const containerClass = isTweet ? ".replies" : ".timeline";
-  const itemClass = containerClass + ' > div:not(.top-ref)';
+  const itemClass = containerClass + " > div:not(.top-ref)";

   var html = document.querySelector("html");
   var container = document.querySelector(containerClass);
   var loading = false;

-  window.addEventListener('scroll', function() {
+  function handleScroll(failed) {
     if (loading) return;

     if (html.scrollTop + html.clientHeight >= html.scrollHeight - 3000) {
       loading = true;
       var loadMore = getLoadMore(document);
@@ -35,13 +36,15 @@ window.onload = function() {
       loadMore.children[0].text = "Loading...";

       var url = new URL(loadMore.children[0].href);
-      url.searchParams.append('scroll', 'true');
+      url.searchParams.append("scroll", "true");

       fetch(url.toString()).then(function (response) {
         if (response.status === 404) throw "error";

         return response.text();
       }).then(function (html) {
         var parser = new DOMParser();
-        var doc = parser.parseFromString(html, 'text/html');
+        var doc = parser.parseFromString(html, "text/html");
         loadMore.remove();

         for (var item of doc.querySelectorAll(itemClass)) {
@@ -57,10 +60,18 @@ window.onload = function() {
         if (isTweet) container.appendChild(newLoadMore);
         else insertBeforeLast(container, newLoadMore);
       }).catch(function (err) {
-        console.warn('Something went wrong.', err);
+        loading = true;
+        console.warn("Something went wrong.", err);
+        if (failed > 3) {
+          loadMore.children[0].text = "Error";
+          return;
+        }
+
+        loading = false;
+        handleScroll((failed || 0) + 1);
       });
     }
-  });
+  }
+
+  window.addEventListener("scroll", () => handleScroll());
 };
 // @license-end
src/api.nim
@@ -33,13 +33,6 @@ proc getGraphUserTweets*(id: string; kind: TimelineKind; after=""): Future[Profile]
     js = await fetch(url ? params, apiId)
   result = parseGraphTimeline(js, "user", after)

-# proc getTimeline*(id: string; after=""; replies=false): Future[Profile] {.async.} =
-#   if id.len == 0: return
-#   let
-#     ps = genParams({"userId": id, "include_tweet_replies": $replies}, after)
-#     url = oldUserTweets / (id & ".json") ? ps
-#   result = parseTimeline(await fetch(url, Api.timeline), after)
-
 proc getGraphListTweets*(id: string; after=""): Future[Timeline] {.async.} =
   if id.len == 0: return
   let
@@ -145,10 +138,10 @@ proc getTweet*(id: string; after=""): Future[Conversation] {.async.} =
   if after.len > 0:
     result.replies = await getReplies(id, after)

-proc getGraphSearch*(query: Query; after=""): Future[Profile] {.async.} =
+proc getGraphTweetSearch*(query: Query; after=""): Future[Timeline] {.async.} =
   let q = genQueryParam(query)
   if q.len == 0 or q == emptyQuery:
-    return Profile(tweets: Timeline(query: query, beginning: true))
+    return Timeline(query: query, beginning: true)

   var
     variables = %*{
@@ -162,44 +155,29 @@ proc getGraphSearch*(query: Query; after=""): Future[Profile] {.async.} =
   if after.len > 0:
     variables["cursor"] = % after
   let url = graphSearchTimeline ? {"variables": $variables, "features": gqlFeatures}
-  result = Profile(tweets: parseGraphSearch(await fetch(url, Api.search), after))
-  result.tweets.query = query
-
-proc getTweetSearch*(query: Query; after=""): Future[Timeline] {.async.} =
-  var q = genQueryParam(query)
-
-  if q.len == 0 or q == emptyQuery:
-    return Timeline(query: query, beginning: true)
-
-  if after.len > 0:
-    q &= " max_id:" & after
-
-  let url = tweetSearch ? genParams({
-    "q": q ,
-    "modules": "status",
-    "result_type": "recent",
-  })
-
-  result = parseTweetSearch(await fetch(url, Api.search), after)
+  result = parseGraphSearch[Tweets](await fetch(url, Api.search), after)
   result.query = query

-proc getUserSearch*(query: Query; page="1"): Future[Result[User]] {.async.} =
+proc getGraphUserSearch*(query: Query; after=""): Future[Result[User]] {.async.} =
   if query.text.len == 0:
     return Result[User](query: query, beginning: true)

-  var url = userSearch ? {
-    "q": query.text,
-    "skip_status": "1",
-    "count": "20",
-    "page": page
-  }
+  var
+    variables = %*{
+      "rawQuery": query.text,
+      "count": 20,
+      "product": "People",
+      "withDownvotePerspective": false,
+      "withReactionsMetadata": false,
+      "withReactionsPerspective": false
+    }
+  if after.len > 0:
+    variables["cursor"] = % after
+    result.beginning = false

-  result = parseUsers(await fetchRaw(url, Api.userSearch))
+  let url = graphSearchTimeline ? {"variables": $variables, "features": gqlFeatures}
+  result = parseGraphSearch[User](await fetch(url, Api.search), after)
   result.query = query
-  if page.len == 0:
-    result.bottom = "2"
-  elif page.allCharsInSet(Digits):
-    result.bottom = $(parseInt(page) + 1)

 proc getPhotoRail*(name: string): Future[PhotoRail] {.async.} =
   if name.len == 0: return
src/apiutils.nim
@@ -1,6 +1,6 @@
 # SPDX-License-Identifier: AGPL-3.0-only
-import httpclient, asyncdispatch, options, strutils, uri
-import jsony, packedjson, zippy
+import httpclient, asyncdispatch, options, strutils, uri, times, math, tables
+import jsony, packedjson, zippy, oauth1
 import types, tokens, consts, parserutils, http_pool
 import experimental/types/common
 import config
@@ -17,8 +17,8 @@ proc genParams*(pars: openArray[(string, string)] = @[]; cursor="";
   for p in pars:
     result &= p
   if ext:
-    result &= ("ext", "mediaStats,isBlueVerified,isVerified,blue,blueVerified")
     result &= ("include_ext_alt_text", "1")
     result &= ("include_ext_media_stats", "1")
     result &= ("include_ext_media_availability", "1")
   if count.len > 0:
     result &= ("count", count)
@@ -30,12 +30,30 @@ proc genParams*(pars: openArray[(string, string)] = @[]; cursor="";
   else:
     result &= ("cursor", cursor)

-proc genHeaders*(token: Token = nil): HttpHeaders =
+proc getOauthHeader(url, oauthToken, oauthTokenSecret: string): string =
+  let
+    encodedUrl = url.replace(",", "%2C").replace("+", "%20")
+    params = OAuth1Parameters(
+      consumerKey: consumerKey,
+      signatureMethod: "HMAC-SHA1",
+      timestamp: $int(round(epochTime())),
+      nonce: "0",
+      isIncludeVersionToHeader: true,
+      token: oauthToken
+    )
+    signature = getSignature(HttpGet, encodedUrl, "", params, consumerSecret, oauthTokenSecret)
+
+  params.signature = percentEncode(signature)
+
+  return getOauth1RequestHeader(params)["authorization"]
+
+proc genHeaders*(url, oauthToken, oauthTokenSecret: string): HttpHeaders =
+  let header = getOauthHeader(url, oauthToken, oauthTokenSecret)
+
   result = newHttpHeaders({
     "connection": "keep-alive",
-    "authorization": auth,
+    "authorization": header,
     "content-type": "application/json",
-    "x-guest-token": if token == nil: "" else: token.tok,
     "x-twitter-active-user": "yes",
     "authority": "api.twitter.com",
     "accept-encoding": "gzip",
@@ -44,24 +62,24 @@ proc genHeaders*(token: Token = nil): HttpHeaders =
     "DNT": "1"
   })

-template updateToken() =
+template updateAccount() =
   if resp.headers.hasKey(rlRemaining):
     let
       remaining = parseInt(resp.headers[rlRemaining])
       reset = parseInt(resp.headers[rlReset])
-    token.setRateLimit(api, remaining, reset)
+    account.setRateLimit(api, remaining, reset)

 template fetchImpl(result, additional_headers, fetchBody) {.dirty.} =
   once:
     pool = HttpPool()

-  var token = await getToken(api)
-  if token.tok.len == 0:
+  var account = await getGuestAccount(api)
+  if account.oauthToken.len == 0:
     raise rateLimitError()

   try:
     var resp: AsyncResponse
-    var headers = genHeaders(token)
+    var headers = genHeaders($url, account.oauthToken, account.oauthSecret)
     for key, value in additional_headers.pairs():
       headers.add(key, value)
     pool.use(headers):
@@ -86,19 +104,19 @@ template fetchImpl(result, additional_headers, fetchBody) {.dirty.} =

     fetchBody

-    release(token, used=true)
+    release(account, used=true)

     if resp.status == $Http400:
       raise newException(InternalError, $url)
   except InternalError as e:
     raise e
   except BadClientError as e:
-    release(token, used=true)
+    release(account, used=true)
     raise e
   except Exception as e:
-    echo "error: ", e.name, ", msg: ", e.msg, ", token: ", token[], ", url: ", url
+    echo "error: ", e.name, ", msg: ", e.msg, ", accountId: ", account.id, ", url: ", url
     if "length" notin e.msg and "descriptor" notin e.msg:
-      release(token, invalid=true)
+      release(account, invalid=true)
     raise rateLimitError()

 proc fetch*(url: Uri; api: Api; additional_headers: HttpHeaders = newHttpHeaders()): Future[JsonNode] {.async.} =
@@ -116,12 +134,12 @@ proc fetch*(url: Uri; api: Api; additional_headers: HttpHeaders = newHttpHeaders()): Future[JsonNode] {.async.} =
       echo resp.status, ": ", body, " --- url: ", url
       result = newJNull()

-    updateToken()
+    updateAccount()

     let error = result.getError
     if error in {invalidToken, badToken}:
       echo "fetch error: ", result.getError
-      release(token, invalid=true)
+      release(account, invalid=true)
       raise rateLimitError()

 proc fetchRaw*(url: Uri; api: Api; additional_headers: HttpHeaders = newHttpHeaders()): Future[string] {.async.} =
@@ -130,11 +148,11 @@ proc fetchRaw*(url: Uri; api: Api; additional_headers: HttpHeaders = newHttpHeaders()): Future[string] {.async.} =
       echo resp.status, ": ", result, " --- url: ", url
       result.setLen(0)

-    updateToken()
+    updateAccount()

     if result.startsWith("{\"errors"):
       let errors = result.fromJson(Errors)
       if errors in {invalidToken, badToken}:
         echo "fetch error: ", errors
-        release(token, invalid=true)
+        release(account, invalid=true)
         raise rateLimitError()
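One practical consequence of this file's changes: the old static Bearer `auth` header could be built once and reused, but an OAuth 1.0a HMAC-SHA1 signature covers the request URL and a fresh timestamp, so genHeaders must now be called per request with the borrowed account's token pair. A minimal sketch of the resulting call flow, using only names from the hunks above (the endpoint string is a placeholder, not from this diff):

# sketch only: assumes this module's imports; the URL is hypothetical
proc exampleRequest() {.async.} =
  let
    account = await getGuestAccount(Api.userTweets)       # borrow an account from the pool
    url = "https://api.twitter.com/graphql/ExampleQuery"  # placeholder endpoint
    headers = genHeaders(url, account.oauthToken, account.oauthSecret)
  # ... issue the HTTP request with these headers, then hand the account back:
  release(account, used=true)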
src/consts.nim
@@ -2,7 +2,8 @@
 import uri, sequtils, strutils

 const
-  auth* = "Bearer AAAAAAAAAAAAAAAAAAAAAFQODgEAAAAAVHTp76lzh3rFzcHbmHVvQxYYpTw%3DckAlMINMjmCwxUcaXbAN4XqJVdgMJaHqNOFgPMK0zN1qLqLQCF"
+  consumerKey* = "3nVuSoBZnx6U4vzUxf5w"
+  consumerSecret* = "Bcs59EFbbsdF6Sl9Ng71smgStWEGwXXKSjYvPVt7qys"

   api = parseUri("https://api.twitter.com")
   activate* = $(api / "1.1/guest/activate.json")
@@ -11,10 +12,6 @@ const

   timelineApi = api / "2/timeline"
   favorites* = timelineApi / "favorites"
-  userSearch* = api / "1.1/users/search.json"
-  tweetSearch* = api / "1.1/search/universal.json"
-
-  # oldUserTweets* = api / "2/timeline/profile"

   graphql = api / "graphql"
   graphUser* = graphql / "u7wQyGi6oExe8_TRWGMq4Q/UserResultByScreenNameQuery"
@@ -35,28 +32,20 @@ const
   graphFollowing* = graphql / "JPZiqKjET7_M1r5Tlr8pyA/Following"

   timelineParams* = {
-    "cards_platform": "Web-13",
-    "tweet_mode": "extended",
-    "ui_lang": "en-US",
-    "send_error_codes": "1",
-    "simple_quoted_tweet": "1",
-    "skip_status": "1",
-    "include_blocked_by": "0",
-    "include_blocking": "0",
-    "include_can_dm": "0",
     "include_can_media_tag": "1",
     "include_cards": "1",
-    "include_composer_source": "0",
     "include_entities": "1",
-    "include_ext_is_blue_verified": "1",
-    "include_ext_media_color": "0",
-    "include_followed_by": "0",
-    "include_mute_edge": "0",
     "include_profile_interstitial_type": "0",
     "include_quote_count": "1",
     "include_reply_count": "1",
     "include_user_entities": "1",
-    "include_want_retweets": "0",
+    "include_ext_reply_count": "1",
+    "include_ext_is_blue_verified": "1",
+    "include_ext_media_color": "0",
+    "cards_platform": "Web-13",
+    "tweet_mode": "extended",
+    "send_error_codes": "1",
+    "simple_quoted_tweet": "1"
   }.toSeq

   gqlFeatures* = """{
src/nitter.nim
@@ -1,7 +1,10 @@
 # SPDX-License-Identifier: AGPL-3.0-only
 import asyncdispatch, strformat, logging
+import config
 from net import Port
 from htmlgen import a
 from os import getEnv
+from json import parseJson

 import jester
@@ -14,6 +17,12 @@ import routes/[
 const instancesUrl = "https://github.com/zedeus/nitter/wiki/Instances"
 const issuesUrl = "https://github.com/zedeus/nitter/issues"

+let
+  accountsPath = getEnv("NITTER_ACCOUNTS_FILE", "./guest_accounts.json")
+  accounts = parseJson(readFile(accountsPath))
+
+initAccountPool(cfg, parseJson(readFile(accountsPath)))
+
 if not cfg.enableDebug:
   # Silence Jester's query warning
   addHandler(newConsoleLogger())
@@ -34,8 +43,6 @@ waitFor initRedisPool(cfg)
 stdout.write &"Connected to Redis at {cfg.redisHost}:{cfg.redisPort}\n"
 stdout.flushFile

-asyncCheck initTokenPool(cfg)
-
 createUnsupportedRouter(cfg)
 createResolverRouter(cfg)
 createPrefRouter(cfg)
src/parser.nim
@@ -1,5 +1,5 @@
 # SPDX-License-Identifier: AGPL-3.0-only
-import strutils, options, tables, times, math
+import strutils, options, times, math, tables
 import packedjson, packedjson/deserialiser
 import types, parserutils, utils
 import experimental/parser/unifiedcard
@@ -29,10 +29,8 @@ proc parseUser(js: JsonNode; id=""): User =
   result.expandUserEntities(js)

 proc parseGraphUser(js: JsonNode): User =
-  var user: JsonNode
-  if "user_result" in js:
-    user = ? js{"user_result", "result"}
-  else:
+  var user = js{"user_result", "result"}
+  if user.isNull:
     user = ? js{"user_results", "result"}
   result = parseUser(user{"legacy"})
@@ -85,7 +83,7 @@ proc parseGif(js: JsonNode): Gif =
 proc parseVideo(js: JsonNode): Video =
   result = Video(
     thumb: js{"media_url_https"}.getImageStr,
-    views: js{"ext", "mediaStats", "r", "ok", "viewCount"}.getStr($js{"mediaStats", "viewCount"}.getInt),
+    views: getVideoViewCount(js),
     available: true,
     title: js{"ext_alt_text"}.getStr,
     durationMs: js{"video_info", "duration_millis"}.getInt
@@ -586,8 +584,8 @@ proc parseGraphRetweetersTimeline*(js: JsonNode; root: string; after=""): UsersTimeline =
 proc parseGraphFollowTimeline*(js: JsonNode; root: string; after=""): UsersTimeline =
   return parseGraphUsersTimeline(js{"data", "user", "result", "timeline", "timeline"}, after)

-proc parseGraphSearch*(js: JsonNode; after=""): Timeline =
-  result = Timeline(beginning: after.len == 0)
+proc parseGraphSearch*[T: User | Tweets](js: JsonNode; after=""): Result[T] =
+  result = Result[T](beginning: after.len == 0)

   let instructions = js{"data", "search_by_raw_query", "search_timeline", "timeline", "instructions"}
   if instructions.len == 0:
@@ -596,15 +594,21 @@ proc parseGraphSearch*(js: JsonNode; after=""): Timeline =
   for instruction in instructions:
     let typ = instruction{"type"}.getStr
     if typ == "TimelineAddEntries":
-      for e in instructions[0]{"entries"}:
+      for e in instruction{"entries"}:
         let entryId = e{"entryId"}.getStr
-        if entryId.startsWith("tweet"):
-          with tweetResult, e{"content", "itemContent", "tweet_results", "result"}:
-            let tweet = parseGraphTweet(tweetResult)
-            if not tweet.available:
-              tweet.id = parseBiggestInt(entryId.getId())
-            result.content.add tweet
-        elif entryId.startsWith("cursor-bottom"):
+        when T is Tweets:
+          if entryId.startsWith("tweet"):
+            with tweetRes, e{"content", "itemContent", "tweet_results", "result"}:
+              let tweet = parseGraphTweet(tweetRes)
+              if not tweet.available:
+                tweet.id = parseBiggestInt(entryId.getId())
+              result.content.add tweet
+        elif T is User:
+          if entryId.startsWith("user"):
+            with userRes, e{"content", "itemContent"}:
+              result.content.add parseGraphUser(userRes)
+
+        if entryId.startsWith("cursor-bottom"):
           result.bottom = e{"content", "value"}.getStr
     elif typ == "TimelineReplaceEntry":
       if instruction{"entry_id_to_replace"}.getStr.startsWith("cursor-bottom"):
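parseGraphSearch is now generic over the entry type, and the when T is Tweets / elif T is User branches are resolved at compile time: each instantiation compiles only the parsing path for its own entry kind, so the User variant carries no tweet-parsing code and vice versa. A self-contained sketch of that pattern (the types and proc here are illustrative stand-ins, not nitter's):

type
  Tweet = object
    id: int
  User = object
    name: string

proc parseEntry[T: Tweet | User](raw: string): T =
  when T is Tweet:
    # this branch exists only in parseEntry[Tweet]
    T(id: raw.len)
  else:
    # and this one only in parseEntry[User]
    T(name: raw)

echo parseEntry[Tweet]("abc")   # (id: 3)
echo parseEntry[User]("abc")    # (name: "abc")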
src/parserutils.nim
@@ -36,7 +36,7 @@ template with*(ident, value, body): untyped =
 template with*(ident; value: JsonNode; body): untyped =
   if true:
     let ident {.inject.} = value
-    if value.notNull: body
+    if value.kind != JNull: body

 template getCursor*(js: JsonNode): string =
   js{"content", "operation", "cursor", "value"}.getStr
@@ -148,6 +148,12 @@ proc getMp4Resolution*(url: string): int =
   # cannot determine resolution (e.g. m3u8/non-mp4 video)
   return 0

+proc getVideoViewCount*(js: JsonNode): string =
+  with stats, js{"ext_media_stats"}:
+    return stats{"view_count"}.getStr($stats{"viewCount"}.getInt)
+
+  return $js{"mediaStats", "viewCount"}.getInt(0)
+
 proc extractSlice(js: JsonNode): Slice[int] =
   result = js["indices"][0].getInt ..< js["indices"][1].getInt
src/redis_cache.nim
@@ -147,15 +147,15 @@ proc getCachedUsername*(userId: string): Future[string] {.async.} =
   if result.len > 0 and user.id.len > 0:
     await all(cacheUserId(result, user.id), cache(user))

-proc getCachedTweet*(id: int64): Future[Tweet] {.async.} =
-  if id == 0: return
-  let tweet = await get(id.tweetKey)
-  if tweet != redisNil:
-    tweet.deserialize(Tweet)
-  else:
-    result = await getGraphTweetResult($id)
-    if not result.isNil:
-      await cache(result)
+# proc getCachedTweet*(id: int64): Future[Tweet] {.async.} =
+#   if id == 0: return
+#   let tweet = await get(id.tweetKey)
+#   if tweet != redisNil:
+#     tweet.deserialize(Tweet)
+#   else:
+#     result = await getGraphTweetResult($id)
+#     if not result.isNil:
+#       await cache(result)

 proc getCachedPhotoRail*(name: string): Future[PhotoRail] {.async.} =
   if name.len == 0: return
src/routes/rss.nim
@@ -27,7 +27,7 @@ proc timelineRss*(req: Request; cfg: Config; query: Query): Future[Rss] {.async.} =
   else:
     var q = query
     q.fromUser = names
-    profile.tweets = await getTweetSearch(q, after)
+    profile.tweets = await getGraphTweetSearch(q, after)
   # this is kinda dumb
   profile.user = User(
     username: name,
@@ -76,7 +76,7 @@ proc createRssRouter*(cfg: Config) =
       if rss.cursor.len > 0:
         respRss(rss, "Search")

-      let tweets = await getTweetSearch(query, cursor)
+      let tweets = await getGraphTweetSearch(query, cursor)
       rss.cursor = tweets.bottom
       rss.feed = renderSearchRss(tweets.content, query.text, genQueryUrl(query), cfg)
src/routes/search.nim
@@ -29,13 +29,13 @@ proc createSearchRouter*(cfg: Config) =
         redirect("/" & q)
       var users: Result[User]
       try:
-        users = await getUserSearch(query, getCursor())
+        users = await getGraphUserSearch(query, getCursor())
       except InternalError:
         users = Result[User](beginning: true, query: query)
       resp renderMain(renderUserSearch(users, prefs), request, cfg, prefs, title)
     of tweets:
       let
-        tweets = await getTweetSearch(query, getCursor())
+        tweets = await getGraphTweetSearch(query, getCursor())
         rss = "/search/rss?" & genQueryUrl(query)
       resp renderMain(renderTweetSearch(tweets, prefs, getPath()),
                       request, cfg, prefs, title, rss=rss)
src/routes/timeline.nim
@@ -54,33 +54,22 @@ proc fetchProfile*(after: string; query: Query; cfg: Config; skipRail=false;

   result =
     case query.kind
-    # of posts: await getTimeline(userId, after)
     of posts: await getGraphUserTweets(userId, TimelineKind.tweets, after)
     of replies: await getGraphUserTweets(userId, TimelineKind.replies, after)
     of media: await getGraphUserTweets(userId, TimelineKind.media, after)
     of favorites: await getFavorites(userId, cfg, after)
-    else: Profile(tweets: await getTweetSearch(query, after))
+    else: Profile(tweets: await getGraphTweetSearch(query, after))

   result.user = await user
   result.photoRail = await rail

   result.tweets.query = query

-  if result.user.protected or result.user.suspended:
-    return
-
-  if not skipPinned and query.kind == posts and
-     result.user.pinnedTweet > 0 and after.len == 0:
-    let tweet = await getCachedTweet(result.user.pinnedTweet)
-    if not tweet.isNil:
-      tweet.pinned = true
-      tweet.user = result.user
-      result.pinned = some tweet
-
 proc showTimeline*(request: Request; query: Query; cfg: Config; prefs: Prefs;
                    rss, after: string): Future[string] {.async.} =
   if query.fromUser.len != 1:
     let
-      timeline = await getTweetSearch(query, after)
+      timeline = await getGraphTweetSearch(query, after)
       html = renderTweetSearch(timeline, prefs, getPath())
     return renderMain(html, request, cfg, prefs, "Multi", rss=rss)
@@ -142,7 +131,7 @@ proc createTimelineRouter*(cfg: Config) =
       # used for the infinite scroll feature
       if @"scroll".len > 0:
         if query.fromUser.len != 1:
-          var timeline = (await getGraphSearch(query, after)).tweets
+          var timeline = await getGraphTweetSearch(query, after)
           if timeline.content.len == 0: resp Http404
           timeline.beginning = true
          resp $renderTweetSearch(timeline, prefs, getPath())
src/tokens.nim
@@ -1,23 +1,18 @@
 # SPDX-License-Identifier: AGPL-3.0-only
-import asyncdispatch, httpclient, times, sequtils, json, random
-import strutils, tables
-import types, consts
+import asyncdispatch, times, json, random, strutils, tables
+import types

+# max requests at a time per account to avoid race conditions
 const
-  maxConcurrentReqs = 5 # max requests at a time per token, to avoid race conditions
-  maxLastUse = 1.hours # if a token is unused for 60 minutes, it expires
-  maxAge = 2.hours + 55.minutes # tokens expire after 3 hours
-  failDelay = initDuration(minutes=30)
+  maxConcurrentReqs = 5
+  dayInSeconds = 24 * 60 * 60

 var
-  tokenPool: seq[Token]
-  lastFailed: Time
+  accountPool: seq[GuestAccount]
   enableLogging = false

-let headers = newHttpHeaders({"authorization": auth})
-
 template log(str) =
-  if enableLogging: echo "[tokens] ", str
+  if enableLogging: echo "[accounts] ", str

 proc getPoolJson*(): JsonNode =
   var
@@ -26,142 +21,111 @@ proc getPoolJson*(): JsonNode =
     totalPending = 0
     reqsPerApi: Table[string, int]

-  for token in tokenPool:
-    totalPending.inc(token.pending)
-    list[token.tok] = %*{
+  let now = epochTime().int
+
+  for account in accountPool:
+    totalPending.inc(account.pending)
+    list[account.id] = %*{
       "apis": newJObject(),
-      "pending": token.pending,
-      "init": $token.init,
-      "lastUse": $token.lastUse
+      "pending": account.pending,
     }

-    for api in token.apis.keys:
-      list[token.tok]["apis"][$api] = %token.apis[api]
+    for api in account.apis.keys:
+      let obj = %*{}
+      if account.apis[api].limited:
+        obj["limited"] = %true
+
+      if account.apis[api].reset > now.int:
+        obj["remaining"] = %account.apis[api].remaining
+
+      list[account.id]["apis"][$api] = obj
+
+      if "remaining" notin obj:
+        continue

       let
         maxReqs =
           case api
-          of Api.search: 100000
+          of Api.search: 50
           of Api.photoRail: 180
-          of Api.timeline: 187
-          of Api.userTweets: 300
-          of Api.userTweetsAndReplies, Api.userRestId,
-             Api.userScreenName, Api.tweetDetail, Api.tweetResult: 500
-          of Api.list, Api.listTweets, Api.listMembers, Api.listBySlug, Api.userMedia, Api.favorites, Api.retweeters, Api.favoriters: 500
-          of Api.userSearch: 900
+          of Api.userTweets, Api.userTweetsAndReplies, Api.userMedia,
+             Api.userRestId, Api.userScreenName,
+             Api.tweetDetail, Api.tweetResult,
+             Api.list, Api.listTweets, Api.listMembers, Api.listBySlug, Api.favorites, Api.retweeters, Api.favoriters, Api.following, Api.followers: 500
           else: 180
-        reqs = maxReqs - token.apis[api].remaining
+        reqs = maxReqs - account.apis[api].remaining

       reqsPerApi[$api] = reqsPerApi.getOrDefault($api, 0) + reqs
       totalReqs.inc(reqs)

   return %*{
-    "amount": tokenPool.len,
+    "amount": accountPool.len,
     "requests": totalReqs,
     "pending": totalPending,
     "apis": reqsPerApi,
-    "tokens": list
+    "accounts": list
   }

 proc rateLimitError*(): ref RateLimitError =
   newException(RateLimitError, "rate limited")

-proc fetchToken(): Future[Token] {.async.} =
-  if getTime() - lastFailed < failDelay:
-    raise rateLimitError()
-
-  let client = newAsyncHttpClient(headers=headers)
-
-  try:
-    let
-      resp = await client.postContent(activate)
-      tokNode = parseJson(resp)["guest_token"]
-      tok = tokNode.getStr($(tokNode.getInt))
-      time = getTime()
-
-    return Token(tok: tok, init: time, lastUse: time)
-  except Exception as e:
-    echo "[tokens] fetching token failed: ", e.msg
-    if "Try again" notin e.msg:
-      echo "[tokens] fetching tokens paused, resuming in 30 minutes"
-      lastFailed = getTime()
-  finally:
-    client.close()
-
-proc expired(token: Token): bool =
-  let time = getTime()
-  token.init < time - maxAge or token.lastUse < time - maxLastUse
-
-proc isLimited(token: Token; api: Api): bool =
-  if token.isNil or token.expired:
+proc isLimited(account: GuestAccount; api: Api): bool =
+  if account.isNil:
     return true

-  if api in token.apis:
-    let limit = token.apis[api]
-    return (limit.remaining <= 10 and limit.reset > epochTime().int)
+  if api in account.apis:
+    let limit = account.apis[api]
+
+    if limit.limited and (epochTime().int - limit.limitedAt) > dayInSeconds:
+      account.apis[api].limited = false
+      echo "account limit reset, api: ", api, ", id: ", account.id
+
+    return limit.limited or (limit.remaining <= 10 and limit.reset > epochTime().int)
   else:
     return false

-proc isReady(token: Token; api: Api): bool =
-  not (token.isNil or token.pending > maxConcurrentReqs or token.isLimited(api))
+proc isReady(account: GuestAccount; api: Api): bool =
+  not (account.isNil or account.pending > maxConcurrentReqs or account.isLimited(api))

-proc release*(token: Token; used=false; invalid=false) =
-  if token.isNil: return
-  if invalid or token.expired:
-    if invalid: log "discarding invalid token"
-    elif token.expired: log "discarding expired token"
+proc release*(account: GuestAccount; used=false; invalid=false) =
+  if account.isNil: return
+  if invalid:
+    log "discarding invalid account: " & account.id

-    let idx = tokenPool.find(token)
-    if idx > -1: tokenPool.delete(idx)
+    let idx = accountPool.find(account)
+    if idx > -1: accountPool.delete(idx)
   elif used:
-    dec token.pending
-    token.lastUse = getTime()
+    dec account.pending

-proc getToken*(api: Api): Future[Token] {.async.} =
-  for i in 0 ..< tokenPool.len:
+proc getGuestAccount*(api: Api): Future[GuestAccount] {.async.} =
+  for i in 0 ..< accountPool.len:
     if result.isReady(api): break
     release(result)
-    result = tokenPool.sample()
+    result = accountPool.sample()

-  if not result.isReady(api):
-    release(result)
-    result = await fetchToken()
-    log "added new token to pool"
-    tokenPool.add result
-
-  if not result.isNil:
+  if not result.isNil and result.isReady(api):
     inc result.pending
+  else:
+    log "no accounts available for API: " & $api
+    raise rateLimitError()

-proc setRateLimit*(token: Token; api: Api; remaining, reset: int) =
+proc setRateLimit*(account: GuestAccount; api: Api; remaining, reset: int) =
   # avoid undefined behavior in race conditions
-  if api in token.apis:
-    let limit = token.apis[api]
+  if api in account.apis:
    let limit = account.apis[api]
     if limit.reset >= reset and limit.remaining < remaining:
       return
+    if limit.reset == reset and limit.remaining >= remaining:
+      account.apis[api].remaining = remaining
+      return

-  token.apis[api] = RateLimit(remaining: remaining, reset: reset)
+  account.apis[api] = RateLimit(remaining: remaining, reset: reset)

-proc poolTokens*(amount: int) {.async.} =
-  var futs: seq[Future[Token]]
-  for i in 0 ..< amount:
-    futs.add fetchToken()
-
-  for token in futs:
-    var newToken: Token
-
-    try: newToken = await token
-    except: discard
-
-    if not newToken.isNil:
-      log "added new token to pool"
-      tokenPool.add newToken
-
-proc initTokenPool*(cfg: Config) {.async.} =
+proc initAccountPool*(cfg: Config; accounts: JsonNode) =
   enableLogging = cfg.enableDebug

-  while true:
-    if tokenPool.countIt(not it.isLimited(Api.timeline)) < cfg.minTokens:
-      await poolTokens(min(4, cfg.minTokens - tokenPool.len))
-    await sleepAsync(2000)
+  for account in accounts:
+    accountPool.add GuestAccount(
+      id: account{"user", "id_str"}.getStr,
+      oauthToken: account{"oauth_token"}.getStr,
+      oauthSecret: account{"oauth_token_secret"}.getStr,
+    )
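initAccountPool reads user.id_str, oauth_token, and oauth_token_secret from each array element, which pins down the shape of the guest_accounts.json file loaded in src/nitter.nim. A minimal sketch of one entry inferred from those lookups (all values are placeholders; any extra fields in a real account dump are simply ignored by the `{}` lookups):

[
  {
    "user": { "id_str": "1234567890" },
    "oauth_token": "1234567890-placeholder",
    "oauth_token_secret": "placeholder"
  }
]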
src/types.nim
@@ -17,10 +17,8 @@ type
   Api* {.pure.} = enum
     tweetDetail
     tweetResult
-    timeline
     photoRail
     search
-    userSearch
     list
     listBySlug
     listMembers
@@ -39,10 +37,14 @@ type
   RateLimit* = object
     remaining*: int
     reset*: int
+    limited*: bool
+    limitedAt*: int

-  Token* = ref object
-    tok*: string
-    init*: Time
+  GuestAccount* = ref object
+    id*: string
+    oauthToken*: string
+    oauthSecret*: string
+    # init*: Time
     lastUse*: Time
     pending*: int
     apis*: Table[Api, RateLimit]
@@ -4,7 +4,7 @@ from parameterized import parameterized
 profiles = [
   ['mobile_test', 'Test account',
    'Test Account. test test Testing username with @mobile_test_2 and a #hashtag',
-   'San Francisco, CA', 'example.com/foobar', 'Joined October 2009', '100'],
+   'San Francisco, CA', 'example.com/foobar', 'Joined October 2009', '98'],
   ['mobile_test_2', 'mobile test 2', '', '', '', 'Joined January 2011', '13']
 ]
@@ -12,12 +12,7 @@ empty = [['emptyuser'], ['mobile_test_10']]

 protected = [['mobile_test_7'], ['Empty_user']]

-photo_rail = [['mobile_test', [
-  'BzUnaDFCUAAmrjs', 'Bo0nDsYIYAIjqVn', 'Bos--KNIQAAA7Li', 'Boq1sDJIYAAxaoi',
-  'BonISmPIEAAhP3G', 'BoQbwJAIUAA0QCY', 'BoQbRQxIIAA3FWD', 'Bn8Qh8iIIAABXrG',
-  'Bn8QIG3IYAA0IGT', 'Bn8O3QeIUAAONai', 'Bn8NGViIAAATNG4', 'BkKovdrCUAAEz79',
-  'BkKoe_oCIAASAqr', 'BkKoRLNCAAAYfDf', 'BkKndxoCQAE1vFt', 'BPEmIbYCMAE44dl'
-]]]
+photo_rail = [['mobile_test', ['Bo0nDsYIYAIjqVn', 'BoQbwJAIUAA0QCY', 'BoQbRQxIIAA3FWD', 'Bn8Qh8iIIAABXrG']]]


 class TweetTest(BaseTestCase):
@@ -60,10 +55,10 @@ class TweetTest(BaseTestCase):
         self.assert_element_absent(Timeline.older)
         self.assert_element_absent(Timeline.end)

-    @parameterized.expand(photo_rail)
-    def test_photo_rail(self, username, images):
-        self.open_nitter(username)
-        self.assert_element_visible(Timeline.photo_rail)
-        for i, url in enumerate(images):
-            img = self.get_attribute(Timeline.photo_rail + f' a:nth-child({i + 1}) img', 'src')
-            self.assertIn(url, img)
+    #@parameterized.expand(photo_rail)
+    #def test_photo_rail(self, username, images):
+        #self.open_nitter(username)
+        #self.assert_element_visible(Timeline.photo_rail)
+        #for i, url in enumerate(images):
+            #img = self.get_attribute(Timeline.photo_rail + f' a:nth-child({i + 1}) img', 'src')
+            #self.assertIn(url, img)
@@ -28,14 +28,14 @@ video_m3u8 = [
 ]

 gallery = [
-  ['mobile_test/status/451108446603980803', [
-    ['BkKovdrCUAAEz79', 'BkKovdcCEAAfoBO']
-  ]],
+  # ['mobile_test/status/451108446603980803', [
+  #   ['BkKovdrCUAAEz79', 'BkKovdcCEAAfoBO']
+  # ]],

-  ['mobile_test/status/471539824713691137', [
-    ['Bos--KNIQAAA7Li', 'Bos--FAIAAAWpah'],
-    ['Bos--IqIQAAav23']
-  ]],
+  # ['mobile_test/status/471539824713691137', [
+  #   ['Bos--KNIQAAA7Li', 'Bos--FAIAAAWpah'],
+  #   ['Bos--IqIQAAav23']
+  # ]],

   ['mobile_test/status/469530783384743936', [
     ['BoQbwJAIUAA0QCY', 'BoQbwN1IMAAuTiP'],