Merge remote-tracking branch 'upstream/guest_accounts'
Commit f290b7b5e7
18 changed files with 233 additions and 270 deletions
@@ -1,4 +1,4 @@
-FROM nimlang/nim:1.6.10-alpine-regular as nim
+FROM nimlang/nim:2.0.0-alpine-regular as nim
 LABEL maintainer="setenforce@protonmail.com"

 RUN apk --no-cache add libsass-dev pcre
@@ -23,7 +23,7 @@ requires "https://github.com/zedeus/redis#d0a0e6f"
 requires "zippy#ca5989a"
 requires "flatty#e668085"
 requires "jsony#ea811be"
+requires "oauth#b8c163b"

 # Tasks
@@ -5,7 +5,7 @@ function insertBeforeLast(node, elem) {
 }

 function getLoadMore(doc) {
-  return doc.querySelector('.show-more:not(.timeline-item)');
+  return doc.querySelector(".show-more:not(.timeline-item)");
 }

 function isDuplicate(item, itemClass) {
@@ -15,18 +15,19 @@ function isDuplicate(item, itemClass) {
   return document.querySelector(itemClass + " .tweet-link[href='" + href + "']") != null;
 }

-window.onload = function() {
+window.onload = function () {
   const url = window.location.pathname;
   const isTweet = url.indexOf("/status/") !== -1;
   const containerClass = isTweet ? ".replies" : ".timeline";
-  const itemClass = containerClass + ' > div:not(.top-ref)';
+  const itemClass = containerClass + " > div:not(.top-ref)";

   var html = document.querySelector("html");
   var container = document.querySelector(containerClass);
   var loading = false;

-  window.addEventListener('scroll', function() {
+  function handleScroll(failed) {
     if (loading) return;
+
     if (html.scrollTop + html.clientHeight >= html.scrollHeight - 3000) {
       loading = true;
       var loadMore = getLoadMore(document);
@@ -35,13 +36,15 @@ window.onload = function() {
       loadMore.children[0].text = "Loading...";

       var url = new URL(loadMore.children[0].href);
-      url.searchParams.append('scroll', 'true');
+      url.searchParams.append("scroll", "true");

       fetch(url.toString()).then(function (response) {
+        if (response.status === 404) throw "error";
+
         return response.text();
       }).then(function (html) {
         var parser = new DOMParser();
-        var doc = parser.parseFromString(html, 'text/html');
+        var doc = parser.parseFromString(html, "text/html");
         loadMore.remove();

         for (var item of doc.querySelectorAll(itemClass)) {
@@ -57,10 +60,18 @@ window.onload = function() {
         if (isTweet) container.appendChild(newLoadMore);
         else insertBeforeLast(container, newLoadMore);
       }).catch(function (err) {
-        console.warn('Something went wrong.', err);
-        loading = true;
+        console.warn("Something went wrong.", err);
+        if (failed > 3) {
+          loadMore.children[0].text = "Error";
+          return;
+        }
+
+        loading = false;
+        handleScroll((failed || 0) + 1);
       });
     }
-  });
+  }
+
+  window.addEventListener("scroll", () => handleScroll());
 };
 // @license-end
src/api.nim (58 changed lines)
@@ -33,13 +33,6 @@ proc getGraphUserTweets*(id: string; kind: TimelineKind; after=""): Future[Profi
     js = await fetch(url ? params, apiId)
   result = parseGraphTimeline(js, "user", after)

-# proc getTimeline*(id: string; after=""; replies=false): Future[Profile] {.async.} =
-#   if id.len == 0: return
-#   let
-#     ps = genParams({"userId": id, "include_tweet_replies": $replies}, after)
-#     url = oldUserTweets / (id & ".json") ? ps
-#   result = parseTimeline(await fetch(url, Api.timeline), after)
-
 proc getGraphListTweets*(id: string; after=""): Future[Timeline] {.async.} =
   if id.len == 0: return
   let
@@ -145,10 +138,10 @@ proc getTweet*(id: string; after=""): Future[Conversation] {.async.} =
   if after.len > 0:
     result.replies = await getReplies(id, after)

-proc getGraphSearch*(query: Query; after=""): Future[Profile] {.async.} =
+proc getGraphTweetSearch*(query: Query; after=""): Future[Timeline] {.async.} =
   let q = genQueryParam(query)
   if q.len == 0 or q == emptyQuery:
-    return Profile(tweets: Timeline(query: query, beginning: true))
+    return Timeline(query: query, beginning: true)

   var
     variables = %*{
@@ -162,44 +155,29 @@ proc getGraphSearch*(query: Query; after=""): Future[Profile] {.async.} =
   if after.len > 0:
     variables["cursor"] = % after
   let url = graphSearchTimeline ? {"variables": $variables, "features": gqlFeatures}
-  result = Profile(tweets: parseGraphSearch(await fetch(url, Api.search), after))
-  result.tweets.query = query
-
-proc getTweetSearch*(query: Query; after=""): Future[Timeline] {.async.} =
-  var q = genQueryParam(query)
-
-  if q.len == 0 or q == emptyQuery:
-    return Timeline(query: query, beginning: true)
-
-  if after.len > 0:
-    q &= " max_id:" & after
-
-  let url = tweetSearch ? genParams({
-    "q": q ,
-    "modules": "status",
-    "result_type": "recent",
-  })
-
-  result = parseTweetSearch(await fetch(url, Api.search), after)
+  result = parseGraphSearch[Tweets](await fetch(url, Api.search), after)
   result.query = query

-proc getUserSearch*(query: Query; page="1"): Future[Result[User]] {.async.} =
+proc getGraphUserSearch*(query: Query; after=""): Future[Result[User]] {.async.} =
   if query.text.len == 0:
     return Result[User](query: query, beginning: true)

-  var url = userSearch ? {
-    "q": query.text,
-    "skip_status": "1",
-    "count": "20",
-    "page": page
-  }
+  var
+    variables = %*{
+      "rawQuery": query.text,
+      "count": 20,
+      "product": "People",
+      "withDownvotePerspective": false,
+      "withReactionsMetadata": false,
+      "withReactionsPerspective": false
+    }
+  if after.len > 0:
+    variables["cursor"] = % after
+    result.beginning = false

-  result = parseUsers(await fetchRaw(url, Api.userSearch))
+  let url = graphSearchTimeline ? {"variables": $variables, "features": gqlFeatures}
+  result = parseGraphSearch[User](await fetch(url, Api.search), after)
   result.query = query
-  if page.len == 0:
-    result.bottom = "2"
-  elif page.allCharsInSet(Digits):
-    result.bottom = $(parseInt(page) + 1)

 proc getPhotoRail*(name: string): Future[PhotoRail] {.async.} =
   if name.len == 0: return
@@ -1,6 +1,6 @@
 # SPDX-License-Identifier: AGPL-3.0-only
-import httpclient, asyncdispatch, options, strutils, uri
-import jsony, packedjson, zippy
+import httpclient, asyncdispatch, options, strutils, uri, times, math, tables
+import jsony, packedjson, zippy, oauth1
 import types, tokens, consts, parserutils, http_pool
 import experimental/types/common
 import config
@@ -17,8 +17,8 @@ proc genParams*(pars: openArray[(string, string)] = @[]; cursor="";
   for p in pars:
     result &= p
   if ext:
-    result &= ("ext", "mediaStats,isBlueVerified,isVerified,blue,blueVerified")
     result &= ("include_ext_alt_text", "1")
+    result &= ("include_ext_media_stats", "1")
     result &= ("include_ext_media_availability", "1")
   if count.len > 0:
     result &= ("count", count)
@@ -30,12 +30,30 @@ proc genParams*(pars: openArray[(string, string)] = @[]; cursor="";
     else:
       result &= ("cursor", cursor)

-proc genHeaders*(token: Token = nil): HttpHeaders =
+proc getOauthHeader(url, oauthToken, oauthTokenSecret: string): string =
+  let
+    encodedUrl = url.replace(",", "%2C").replace("+", "%20")
+    params = OAuth1Parameters(
+      consumerKey: consumerKey,
+      signatureMethod: "HMAC-SHA1",
+      timestamp: $int(round(epochTime())),
+      nonce: "0",
+      isIncludeVersionToHeader: true,
+      token: oauthToken
+    )
+    signature = getSignature(HttpGet, encodedUrl, "", params, consumerSecret, oauthTokenSecret)
+
+  params.signature = percentEncode(signature)
+
+  return getOauth1RequestHeader(params)["authorization"]
+
+proc genHeaders*(url, oauthToken, oauthTokenSecret: string): HttpHeaders =
+  let header = getOauthHeader(url, oauthToken, oauthTokenSecret)
+
   result = newHttpHeaders({
     "connection": "keep-alive",
-    "authorization": auth,
+    "authorization": header,
     "content-type": "application/json",
-    "x-guest-token": if token == nil: "" else: token.tok,
     "x-twitter-active-user": "yes",
     "authority": "api.twitter.com",
     "accept-encoding": "gzip",
@@ -44,24 +62,24 @@ proc genHeaders*(token: Token = nil): HttpHeaders =
     "DNT": "1"
   })

-template updateToken() =
+template updateAccount() =
   if resp.headers.hasKey(rlRemaining):
     let
       remaining = parseInt(resp.headers[rlRemaining])
       reset = parseInt(resp.headers[rlReset])
-    token.setRateLimit(api, remaining, reset)
+    account.setRateLimit(api, remaining, reset)

 template fetchImpl(result, additional_headers, fetchBody) {.dirty.} =
   once:
     pool = HttpPool()

-  var token = await getToken(api)
-  if token.tok.len == 0:
+  var account = await getGuestAccount(api)
+  if account.oauthToken.len == 0:
     raise rateLimitError()

   try:
     var resp: AsyncResponse
-    var headers = genHeaders(token)
+    var headers = genHeaders($url, account.oauthToken, account.oauthSecret)
     for key, value in additional_headers.pairs():
       headers.add(key, value)
     pool.use(headers):
@@ -86,19 +104,19 @@ template fetchImpl(result, additional_headers, fetchBody) {.dirty.} =

       fetchBody

-    release(token, used=true)
+    release(account, used=true)

     if resp.status == $Http400:
       raise newException(InternalError, $url)
   except InternalError as e:
     raise e
   except BadClientError as e:
-    release(token, used=true)
+    release(account, used=true)
     raise e
   except Exception as e:
-    echo "error: ", e.name, ", msg: ", e.msg, ", token: ", token[], ", url: ", url
+    echo "error: ", e.name, ", msg: ", e.msg, ", accountId: ", account.id, ", url: ", url
     if "length" notin e.msg and "descriptor" notin e.msg:
-      release(token, invalid=true)
+      release(account, invalid=true)
     raise rateLimitError()

 proc fetch*(url: Uri; api: Api; additional_headers: HttpHeaders = newHttpHeaders()): Future[JsonNode] {.async.} =
@@ -116,12 +134,12 @@ proc fetch*(url: Uri; api: Api; additional_headers: HttpHeaders = newHttpHeaders
       echo resp.status, ": ", body, " --- url: ", url
       result = newJNull()

-    updateToken()
+    updateAccount()

     let error = result.getError
     if error in {invalidToken, badToken}:
       echo "fetch error: ", result.getError
-      release(token, invalid=true)
+      release(account, invalid=true)
       raise rateLimitError()

 proc fetchRaw*(url: Uri; api: Api; additional_headers: HttpHeaders = newHttpHeaders()): Future[string] {.async.} =
@@ -130,11 +148,11 @@ proc fetchRaw*(url: Uri; api: Api; additional_headers: HttpHeaders = newHttpHead
       echo resp.status, ": ", result, " --- url: ", url
       result.setLen(0)

-    updateToken()
+    updateAccount()

     if result.startsWith("{\"errors"):
       let errors = result.fromJson(Errors)
       if errors in {invalidToken, badToken}:
         echo "fetch error: ", errors
-        release(token, invalid=true)
+        release(account, invalid=true)
         raise rateLimitError()
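Note: the genHeaders change above swaps the static Bearer token for a per-request OAuth 1.0a signature. A minimal sketch of that signing flow, using only the oauth1 calls that appear in this diff (the key, secret, and token arguments are placeholders, not real credentials):

import math, strutils, times
import httpclient, oauth1

proc signedAuthHeader(url, oauthToken, oauthTokenSecret,
                      consumerKey, consumerSecret: string): string =
  # Mirrors getOauthHeader in the hunk above: build the OAuth1 parameter set,
  # sign the GET request with HMAC-SHA1, percent-encode the signature, and
  # return the value to send as the "authorization" header.
  let
    encodedUrl = url.replace(",", "%2C").replace("+", "%20")
    params = OAuth1Parameters(
      consumerKey: consumerKey,
      signatureMethod: "HMAC-SHA1",
      timestamp: $int(round(epochTime())),
      nonce: "0",
      isIncludeVersionToHeader: true,
      token: oauthToken
    )
    signature = getSignature(HttpGet, encodedUrl, "", params,
                             consumerSecret, oauthTokenSecret)
  params.signature = percentEncode(signature)
  return getOauth1RequestHeader(params)["authorization"]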
@@ -2,7 +2,8 @@
 import uri, sequtils, strutils

 const
-  auth* = "Bearer AAAAAAAAAAAAAAAAAAAAAFQODgEAAAAAVHTp76lzh3rFzcHbmHVvQxYYpTw%3DckAlMINMjmCwxUcaXbAN4XqJVdgMJaHqNOFgPMK0zN1qLqLQCF"
+  consumerKey* = "3nVuSoBZnx6U4vzUxf5w"
+  consumerSecret* = "Bcs59EFbbsdF6Sl9Ng71smgStWEGwXXKSjYvPVt7qys"

   api = parseUri("https://api.twitter.com")
   activate* = $(api / "1.1/guest/activate.json")
@@ -11,10 +12,6 @@ const

   timelineApi = api / "2/timeline"
   favorites* = timelineApi / "favorites"
-  userSearch* = api / "1.1/users/search.json"
-  tweetSearch* = api / "1.1/search/universal.json"
-
-  # oldUserTweets* = api / "2/timeline/profile"

   graphql = api / "graphql"
   graphUser* = graphql / "u7wQyGi6oExe8_TRWGMq4Q/UserResultByScreenNameQuery"
@@ -35,28 +32,20 @@ const
   graphFollowing* = graphql / "JPZiqKjET7_M1r5Tlr8pyA/Following"

   timelineParams* = {
-    "cards_platform": "Web-13",
-    "tweet_mode": "extended",
-    "ui_lang": "en-US",
-    "send_error_codes": "1",
-    "simple_quoted_tweet": "1",
-    "skip_status": "1",
-    "include_blocked_by": "0",
-    "include_blocking": "0",
-    "include_can_dm": "0",
     "include_can_media_tag": "1",
     "include_cards": "1",
-    "include_composer_source": "0",
     "include_entities": "1",
-    "include_ext_is_blue_verified": "1",
-    "include_ext_media_color": "0",
-    "include_followed_by": "0",
-    "include_mute_edge": "0",
     "include_profile_interstitial_type": "0",
     "include_quote_count": "1",
     "include_reply_count": "1",
     "include_user_entities": "1",
-    "include_want_retweets": "0",
+    "include_ext_reply_count": "1",
+    "include_ext_is_blue_verified": "1",
+    "include_ext_media_color": "0",
+    "cards_platform": "Web-13",
+    "tweet_mode": "extended",
+    "send_error_codes": "1",
+    "simple_quoted_tweet": "1"
   }.toSeq

   gqlFeatures* = """{
@@ -1,7 +1,10 @@
 # SPDX-License-Identifier: AGPL-3.0-only
 import asyncdispatch, strformat, logging
+import config
 from net import Port
 from htmlgen import a
+from os import getEnv
+from json import parseJson

 import jester

@@ -14,6 +17,12 @@ import routes/[
 const instancesUrl = "https://github.com/zedeus/nitter/wiki/Instances"
 const issuesUrl = "https://github.com/zedeus/nitter/issues"

+let
+  accountsPath = getEnv("NITTER_ACCOUNTS_FILE", "./guest_accounts.json")
+  accounts = parseJson(readFile(accountsPath))
+
+initAccountPool(cfg, parseJson(readFile(accountsPath)))
+
 if not cfg.enableDebug:
   # Silence Jester's query warning
   addHandler(newConsoleLogger())
@@ -34,8 +43,6 @@ waitFor initRedisPool(cfg)
 stdout.write &"Connected to Redis at {cfg.redisHost}:{cfg.redisPort}\n"
 stdout.flushFile

-asyncCheck initTokenPool(cfg)
-
 createUnsupportedRouter(cfg)
 createResolverRouter(cfg)
 createPrefRouter(cfg)
@@ -1,5 +1,5 @@
 # SPDX-License-Identifier: AGPL-3.0-only
-import strutils, options, tables, times, math
+import strutils, options, times, math, tables
 import packedjson, packedjson/deserialiser
 import types, parserutils, utils
 import experimental/parser/unifiedcard
@@ -29,10 +29,8 @@ proc parseUser(js: JsonNode; id=""): User =
   result.expandUserEntities(js)

 proc parseGraphUser(js: JsonNode): User =
-  var user: JsonNode
-  if "user_result" in js:
-    user = ? js{"user_result", "result"}
-  else:
+  var user = js{"user_result", "result"}
+  if user.isNull:
     user = ? js{"user_results", "result"}
   result = parseUser(user{"legacy"})

@@ -85,7 +83,7 @@ proc parseGif(js: JsonNode): Gif =
 proc parseVideo(js: JsonNode): Video =
   result = Video(
     thumb: js{"media_url_https"}.getImageStr,
-    views: js{"ext", "mediaStats", "r", "ok", "viewCount"}.getStr($js{"mediaStats", "viewCount"}.getInt),
+    views: getVideoViewCount(js),
     available: true,
     title: js{"ext_alt_text"}.getStr,
     durationMs: js{"video_info", "duration_millis"}.getInt
@@ -586,8 +584,8 @@ proc parseGraphRetweetersTimeline*(js: JsonNode; root: string; after=""): UsersT
 proc parseGraphFollowTimeline*(js: JsonNode; root: string; after=""): UsersTimeline =
   return parseGraphUsersTimeline(js{"data", "user", "result", "timeline", "timeline"}, after)

-proc parseGraphSearch*(js: JsonNode; after=""): Timeline =
-  result = Timeline(beginning: after.len == 0)
+proc parseGraphSearch*[T: User | Tweets](js: JsonNode; after=""): Result[T] =
+  result = Result[T](beginning: after.len == 0)

   let instructions = js{"data", "search_by_raw_query", "search_timeline", "timeline", "instructions"}
   if instructions.len == 0:
@@ -596,15 +594,21 @@ proc parseGraphSearch*(js: JsonNode; after=""): Timeline =
   for instruction in instructions:
     let typ = instruction{"type"}.getStr
     if typ == "TimelineAddEntries":
-      for e in instructions[0]{"entries"}:
+      for e in instruction{"entries"}:
         let entryId = e{"entryId"}.getStr
-        if entryId.startsWith("tweet"):
-          with tweetResult, e{"content", "itemContent", "tweet_results", "result"}:
-            let tweet = parseGraphTweet(tweetResult)
-            if not tweet.available:
-              tweet.id = parseBiggestInt(entryId.getId())
-            result.content.add tweet
-        elif entryId.startsWith("cursor-bottom"):
+        when T is Tweets:
+          if entryId.startsWith("tweet"):
+            with tweetRes, e{"content", "itemContent", "tweet_results", "result"}:
+              let tweet = parseGraphTweet(tweetRes)
+              if not tweet.available:
+                tweet.id = parseBiggestInt(entryId.getId())
+              result.content.add tweet
+        elif T is User:
+          if entryId.startsWith("user"):
+            with userRes, e{"content", "itemContent"}:
+              result.content.add parseGraphUser(userRes)
+
+        if entryId.startsWith("cursor-bottom"):
           result.bottom = e{"content", "value"}.getStr
     elif typ == "TimelineReplaceEntry":
       if instruction{"entry_id_to_replace"}.getStr.startsWith("cursor-bottom"):
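Note: parseGraphSearch is now generic over the entry type, selecting the tweet or user branch at compile time with `when`. A self-contained toy sketch of that pattern (the types and data below are illustrative, not Nitter's own):

type
  Tweet = object
    text: string
  User = object
    name: string
  Result[T] = object
    content: seq[T]
    beginning: bool

proc parseSearch[T: Tweet | User](entries: seq[string]): Result[T] =
  # One generic body, two instantiations: only the `when` branch that
  # matches T is compiled into each specialization.
  result = Result[T](beginning: true)
  for entry in entries:
    when T is Tweet:
      result.content.add Tweet(text: entry)
    else:
      result.content.add User(name: entry)

when isMainModule:
  echo parseSearch[Tweet](@["hello world"]).content[0].text   # hello world
  echo parseSearch[User](@["mobile_test"]).content[0].name    # mobile_test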
@@ -36,7 +36,7 @@ template with*(ident, value, body): untyped =
 template with*(ident; value: JsonNode; body): untyped =
   if true:
     let ident {.inject.} = value
-    if value.notNull: body
+    if value.kind != JNull: body

 template getCursor*(js: JsonNode): string =
   js{"content", "operation", "cursor", "value"}.getStr
@@ -148,6 +148,12 @@ proc getMp4Resolution*(url: string): int =
   # cannot determine resolution (e.g. m3u8/non-mp4 video)
   return 0

+proc getVideoViewCount*(js: JsonNode): string =
+  with stats, js{"ext_media_stats"}:
+    return stats{"view_count"}.getStr($stats{"viewCount"}.getInt)
+
+  return $js{"mediaStats", "viewCount"}.getInt(0)
+
 proc extractSlice(js: JsonNode): Slice[int] =
   result = js["indices"][0].getInt ..< js["indices"][1].getInt
@@ -147,15 +147,15 @@ proc getCachedUsername*(userId: string): Future[string] {.async.} =
   if result.len > 0 and user.id.len > 0:
     await all(cacheUserId(result, user.id), cache(user))

-proc getCachedTweet*(id: int64): Future[Tweet] {.async.} =
-  if id == 0: return
-  let tweet = await get(id.tweetKey)
-  if tweet != redisNil:
-    tweet.deserialize(Tweet)
-  else:
-    result = await getGraphTweetResult($id)
-    if not result.isNil:
-      await cache(result)
+# proc getCachedTweet*(id: int64): Future[Tweet] {.async.} =
+#   if id == 0: return
+#   let tweet = await get(id.tweetKey)
+#   if tweet != redisNil:
+#     tweet.deserialize(Tweet)
+#   else:
+#     result = await getGraphTweetResult($id)
+#     if not result.isNil:
+#       await cache(result)

 proc getCachedPhotoRail*(name: string): Future[PhotoRail] {.async.} =
   if name.len == 0: return
@@ -27,7 +27,7 @@ proc timelineRss*(req: Request; cfg: Config; query: Query): Future[Rss] {.async.
   else:
     var q = query
     q.fromUser = names
-    profile.tweets = await getTweetSearch(q, after)
+    profile.tweets = await getGraphTweetSearch(q, after)
     # this is kinda dumb
     profile.user = User(
       username: name,
@@ -76,7 +76,7 @@ proc createRssRouter*(cfg: Config) =
       if rss.cursor.len > 0:
         respRss(rss, "Search")

-      let tweets = await getTweetSearch(query, cursor)
+      let tweets = await getGraphTweetSearch(query, cursor)
       rss.cursor = tweets.bottom
       rss.feed = renderSearchRss(tweets.content, query.text, genQueryUrl(query), cfg)
@@ -29,13 +29,13 @@ proc createSearchRouter*(cfg: Config) =
         redirect("/" & q)
       var users: Result[User]
       try:
-        users = await getUserSearch(query, getCursor())
+        users = await getGraphUserSearch(query, getCursor())
       except InternalError:
         users = Result[User](beginning: true, query: query)
       resp renderMain(renderUserSearch(users, prefs), request, cfg, prefs, title)
     of tweets:
       let
-        tweets = await getTweetSearch(query, getCursor())
+        tweets = await getGraphTweetSearch(query, getCursor())
         rss = "/search/rss?" & genQueryUrl(query)
       resp renderMain(renderTweetSearch(tweets, prefs, getPath()),
                       request, cfg, prefs, title, rss=rss)
@@ -54,33 +54,22 @@ proc fetchProfile*(after: string; query: Query; cfg: Config; skipRail=false;

   result =
     case query.kind
-    # of posts: await getTimeline(userId, after)
+    of posts: await getGraphUserTweets(userId, TimelineKind.tweets, after)
     of replies: await getGraphUserTweets(userId, TimelineKind.replies, after)
     of media: await getGraphUserTweets(userId, TimelineKind.media, after)
     of favorites: await getFavorites(userId, cfg, after)
-    else: Profile(tweets: await getTweetSearch(query, after))
+    else: Profile(tweets: await getGraphTweetSearch(query, after))

   result.user = await user
   result.photoRail = await rail

   result.tweets.query = query

-  if result.user.protected or result.user.suspended:
-    return
-
-  if not skipPinned and query.kind == posts and
-     result.user.pinnedTweet > 0 and after.len == 0:
-    let tweet = await getCachedTweet(result.user.pinnedTweet)
-    if not tweet.isNil:
-      tweet.pinned = true
-      tweet.user = result.user
-      result.pinned = some tweet
-
 proc showTimeline*(request: Request; query: Query; cfg: Config; prefs: Prefs;
                    rss, after: string): Future[string] {.async.} =
   if query.fromUser.len != 1:
     let
-      timeline = await getTweetSearch(query, after)
+      timeline = await getGraphTweetSearch(query, after)
       html = renderTweetSearch(timeline, prefs, getPath())
     return renderMain(html, request, cfg, prefs, "Multi", rss=rss)

@@ -142,7 +131,7 @@ proc createTimelineRouter*(cfg: Config) =
       # used for the infinite scroll feature
       if @"scroll".len > 0:
         if query.fromUser.len != 1:
-          var timeline = (await getGraphSearch(query, after)).tweets
+          var timeline = await getGraphTweetSearch(query, after)
          if timeline.content.len == 0: resp Http404
          timeline.beginning = true
          resp $renderTweetSearch(timeline, prefs, getPath())
src/tokens.nim (178 changed lines)
@@ -1,23 +1,18 @@
 # SPDX-License-Identifier: AGPL-3.0-only
-import asyncdispatch, httpclient, times, sequtils, json, random
-import strutils, tables
-import types, consts
+import asyncdispatch, times, json, random, strutils, tables
+import types

+# max requests at a time per account to avoid race conditions
 const
-  maxConcurrentReqs = 5 # max requests at a time per token, to avoid race conditions
-  maxLastUse = 1.hours # if a token is unused for 60 minutes, it expires
-  maxAge = 2.hours + 55.minutes # tokens expire after 3 hours
-  failDelay = initDuration(minutes=30)
+  maxConcurrentReqs = 5
+  dayInSeconds = 24 * 60 * 60

 var
-  tokenPool: seq[Token]
-  lastFailed: Time
+  accountPool: seq[GuestAccount]
   enableLogging = false

-let headers = newHttpHeaders({"authorization": auth})
-
 template log(str) =
-  if enableLogging: echo "[tokens] ", str
+  if enableLogging: echo "[accounts] ", str

 proc getPoolJson*(): JsonNode =
   var
@@ -26,142 +21,111 @@ proc getPoolJson*(): JsonNode =
     totalPending = 0
     reqsPerApi: Table[string, int]

-  for token in tokenPool:
-    totalPending.inc(token.pending)
-    list[token.tok] = %*{
+  let now = epochTime().int
+  for account in accountPool:
+    totalPending.inc(account.pending)
+    list[account.id] = %*{
       "apis": newJObject(),
-      "pending": token.pending,
-      "init": $token.init,
-      "lastUse": $token.lastUse
+      "pending": account.pending,
     }

-    for api in token.apis.keys:
-      list[token.tok]["apis"][$api] = %token.apis[api]
+    for api in account.apis.keys:
+      let obj = %*{}
+      if account.apis[api].limited:
+        obj["limited"] = %true
+
+      if account.apis[api].reset > now.int:
+        obj["remaining"] = %account.apis[api].remaining
+
+      list[account.id]["apis"][$api] = obj
+
+      if "remaining" notin obj:
+        continue

       let
         maxReqs =
           case api
-          of Api.search: 100000
+          of Api.search: 50
           of Api.photoRail: 180
-          of Api.timeline: 187
-          of Api.userTweets: 300
-          of Api.userTweetsAndReplies, Api.userRestId,
-             Api.userScreenName, Api.tweetDetail, Api.tweetResult: 500
-          of Api.list, Api.listTweets, Api.listMembers, Api.listBySlug, Api.userMedia, Api.favorites, Api.retweeters, Api.favoriters: 500
-          of Api.userSearch: 900
-          else: 180
-        reqs = maxReqs - token.apis[api].remaining
+          of Api.userTweets, Api.userTweetsAndReplies, Api.userMedia,
+             Api.userRestId, Api.userScreenName,
+             Api.tweetDetail, Api.tweetResult,
+             Api.list, Api.listTweets, Api.listMembers, Api.listBySlug, Api.favorites, Api.retweeters, Api.favoriters, Api.following, Api.followers: 500
+        reqs = maxReqs - account.apis[api].remaining

       reqsPerApi[$api] = reqsPerApi.getOrDefault($api, 0) + reqs
       totalReqs.inc(reqs)

   return %*{
-    "amount": tokenPool.len,
+    "amount": accountPool.len,
     "requests": totalReqs,
     "pending": totalPending,
     "apis": reqsPerApi,
-    "tokens": list
+    "accounts": list
   }

 proc rateLimitError*(): ref RateLimitError =
   newException(RateLimitError, "rate limited")

-proc fetchToken(): Future[Token] {.async.} =
-  if getTime() - lastFailed < failDelay:
-    raise rateLimitError()
-
-  let client = newAsyncHttpClient(headers=headers)
-
-  try:
-    let
-      resp = await client.postContent(activate)
-      tokNode = parseJson(resp)["guest_token"]
-      tok = tokNode.getStr($(tokNode.getInt))
-      time = getTime()
-
-    return Token(tok: tok, init: time, lastUse: time)
-  except Exception as e:
-    echo "[tokens] fetching token failed: ", e.msg
-    if "Try again" notin e.msg:
-      echo "[tokens] fetching tokens paused, resuming in 30 minutes"
-      lastFailed = getTime()
-  finally:
-    client.close()
-
-proc expired(token: Token): bool =
-  let time = getTime()
-  token.init < time - maxAge or token.lastUse < time - maxLastUse
-
-proc isLimited(token: Token; api: Api): bool =
-  if token.isNil or token.expired:
+proc isLimited(account: GuestAccount; api: Api): bool =
+  if account.isNil:
     return true

-  if api in token.apis:
-    let limit = token.apis[api]
-    return (limit.remaining <= 10 and limit.reset > epochTime().int)
+  if api in account.apis:
+    let limit = account.apis[api]
+    if limit.limited and (epochTime().int - limit.limitedAt) > dayInSeconds:
+      account.apis[api].limited = false
+      echo "account limit reset, api: ", api, ", id: ", account.id
+
+    return limit.limited or (limit.remaining <= 10 and limit.reset > epochTime().int)
   else:
     return false

-proc isReady(token: Token; api: Api): bool =
-  not (token.isNil or token.pending > maxConcurrentReqs or token.isLimited(api))
+proc isReady(account: GuestAccount; api: Api): bool =
+  not (account.isNil or account.pending > maxConcurrentReqs or account.isLimited(api))

-proc release*(token: Token; used=false; invalid=false) =
-  if token.isNil: return
-  if invalid or token.expired:
-    if invalid: log "discarding invalid token"
-    elif token.expired: log "discarding expired token"
+proc release*(account: GuestAccount; used=false; invalid=false) =
+  if account.isNil: return
+  if invalid:
+    log "discarding invalid account: " & account.id

-    let idx = tokenPool.find(token)
-    if idx > -1: tokenPool.delete(idx)
+    let idx = accountPool.find(account)
+    if idx > -1: accountPool.delete(idx)
   elif used:
-    dec token.pending
-    token.lastUse = getTime()
+    dec account.pending

-proc getToken*(api: Api): Future[Token] {.async.} =
-  for i in 0 ..< tokenPool.len:
+proc getGuestAccount*(api: Api): Future[GuestAccount] {.async.} =
+  for i in 0 ..< accountPool.len:
     if result.isReady(api): break
     release(result)
-    result = tokenPool.sample()
+    result = accountPool.sample()

-  if not result.isReady(api):
-    release(result)
-    result = await fetchToken()
-    log "added new token to pool"
-    tokenPool.add result
-
-  if not result.isNil:
+  if not result.isNil and result.isReady(api):
     inc result.pending
   else:
+    log "no accounts available for API: " & $api
     raise rateLimitError()

-proc setRateLimit*(token: Token; api: Api; remaining, reset: int) =
+proc setRateLimit*(account: GuestAccount; api: Api; remaining, reset: int) =
   # avoid undefined behavior in race conditions
-  if api in token.apis:
-    let limit = token.apis[api]
+  if api in account.apis:
    let limit = account.apis[api]
     if limit.reset >= reset and limit.remaining < remaining:
       return
+    if limit.reset == reset and limit.remaining >= remaining:
+      account.apis[api].remaining = remaining
+      return

-  token.apis[api] = RateLimit(remaining: remaining, reset: reset)
+  account.apis[api] = RateLimit(remaining: remaining, reset: reset)

-proc poolTokens*(amount: int) {.async.} =
-  var futs: seq[Future[Token]]
-  for i in 0 ..< amount:
-    futs.add fetchToken()
-
-  for token in futs:
-    var newToken: Token
-
-    try: newToken = await token
-    except: discard
-
-    if not newToken.isNil:
-      log "added new token to pool"
-      tokenPool.add newToken
-
-proc initTokenPool*(cfg: Config) {.async.} =
+proc initAccountPool*(cfg: Config; accounts: JsonNode) =
   enableLogging = cfg.enableDebug

-  while true:
-    if tokenPool.countIt(not it.isLimited(Api.timeline)) < cfg.minTokens:
-      await poolTokens(min(4, cfg.minTokens - tokenPool.len))
-    await sleepAsync(2000)
+  for account in accounts:
+    accountPool.add GuestAccount(
+      id: account{"user", "id_str"}.getStr,
+      oauthToken: account{"oauth_token"}.getStr,
+      oauthSecret: account{"oauth_token_secret"}.getStr,
+    )
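Note: initAccountPool above reads each account's user.id_str, oauth_token, and oauth_token_secret. A sketch of the JSON shape this implies (field names are taken from the parsing code above; all values are placeholders), built and read back with std/json:

import json

# Hypothetical guest_accounts.json content; only the three fields read by
# initAccountPool are shown.
let accounts = %*[
  {
    "user": {"id_str": "1234567890"},
    "oauth_token": "1234567890-placeholder",
    "oauth_token_secret": "placeholder-secret"
  }
]

for account in accounts:
  echo account{"user", "id_str"}.getStr, ": token length ",
    account{"oauth_token"}.getStr.len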
@@ -17,10 +17,8 @@ type
   Api* {.pure.} = enum
     tweetDetail
     tweetResult
-    timeline
     photoRail
     search
-    userSearch
     list
     listBySlug
     listMembers
@@ -39,10 +37,14 @@ type
   RateLimit* = object
     remaining*: int
     reset*: int
+    limited*: bool
+    limitedAt*: int

-  Token* = ref object
-    tok*: string
-    init*: Time
+  GuestAccount* = ref object
+    id*: string
+    oauthToken*: string
+    oauthSecret*: string
+    # init*: Time
     lastUse*: Time
     pending*: int
     apis*: Table[Api, RateLimit]
@@ -4,7 +4,7 @@ from parameterized import parameterized
 profiles = [
     ['mobile_test', 'Test account',
      'Test Account. test test Testing username with @mobile_test_2 and a #hashtag',
-     'San Francisco, CA', 'example.com/foobar', 'Joined October 2009', '100'],
+     'San Francisco, CA', 'example.com/foobar', 'Joined October 2009', '98'],
     ['mobile_test_2', 'mobile test 2', '', '', '', 'Joined January 2011', '13']
 ]

@@ -12,12 +12,7 @@ empty = [['emptyuser'], ['mobile_test_10']]

 protected = [['mobile_test_7'], ['Empty_user']]

-photo_rail = [['mobile_test', [
-    'BzUnaDFCUAAmrjs', 'Bo0nDsYIYAIjqVn', 'Bos--KNIQAAA7Li', 'Boq1sDJIYAAxaoi',
-    'BonISmPIEAAhP3G', 'BoQbwJAIUAA0QCY', 'BoQbRQxIIAA3FWD', 'Bn8Qh8iIIAABXrG',
-    'Bn8QIG3IYAA0IGT', 'Bn8O3QeIUAAONai', 'Bn8NGViIAAATNG4', 'BkKovdrCUAAEz79',
-    'BkKoe_oCIAASAqr', 'BkKoRLNCAAAYfDf', 'BkKndxoCQAE1vFt', 'BPEmIbYCMAE44dl'
-]]]
+photo_rail = [['mobile_test', ['Bo0nDsYIYAIjqVn', 'BoQbwJAIUAA0QCY', 'BoQbRQxIIAA3FWD', 'Bn8Qh8iIIAABXrG']]]


 class TweetTest(BaseTestCase):
@@ -60,10 +55,10 @@ class TweetTest(BaseTestCase):
         self.assert_element_absent(Timeline.older)
         self.assert_element_absent(Timeline.end)

-    @parameterized.expand(photo_rail)
-    def test_photo_rail(self, username, images):
-        self.open_nitter(username)
-        self.assert_element_visible(Timeline.photo_rail)
-        for i, url in enumerate(images):
-            img = self.get_attribute(Timeline.photo_rail + f' a:nth-child({i + 1}) img', 'src')
-            self.assertIn(url, img)
+    #@parameterized.expand(photo_rail)
+    #def test_photo_rail(self, username, images):
+        #self.open_nitter(username)
+        #self.assert_element_visible(Timeline.photo_rail)
+        #for i, url in enumerate(images):
+            #img = self.get_attribute(Timeline.photo_rail + f' a:nth-child({i + 1}) img', 'src')
+            #self.assertIn(url, img)
@@ -28,14 +28,14 @@ video_m3u8 = [
 ]

 gallery = [
-    ['mobile_test/status/451108446603980803', [
-        ['BkKovdrCUAAEz79', 'BkKovdcCEAAfoBO']
-    ]],
+    # ['mobile_test/status/451108446603980803', [
+    #     ['BkKovdrCUAAEz79', 'BkKovdcCEAAfoBO']
+    # ]],

-    ['mobile_test/status/471539824713691137', [
-        ['Bos--KNIQAAA7Li', 'Bos--FAIAAAWpah'],
-        ['Bos--IqIQAAav23']
-    ]],
+    # ['mobile_test/status/471539824713691137', [
+    #     ['Bos--KNIQAAA7Li', 'Bos--FAIAAAWpah'],
+    #     ['Bos--IqIQAAav23']
+    # ]],

     ['mobile_test/status/469530783384743936', [
         ['BoQbwJAIUAA0QCY', 'BoQbwN1IMAAuTiP'],