Merge pull request #2 from PrivacyDevel/graphql

Graphql
This commit is contained in:
PrivacyDevel 2023-05-26 21:34:07 +00:00, committed by GitHub
commit 7753e44d36
29 changed files with 409 additions and 305 deletions

View file

@ -1,11 +1,11 @@
[Server]
+hostname = "nitter.net" # for generating links, change this to your own domain/ip
+title = "nitter"
address = "0.0.0.0"
port = 8080
https = false # disable to enable cookies when not using https
httpMaxConnections = 100
staticDir = "./public"
-title = "nitter"
-hostname = "nitter.net"
[Cache]
listMinutes = 240 # how long to cache list info (not the tweets, so keep it high)
@ -13,9 +13,9 @@ rssMinutes = 10 # how long to cache rss queries
redisHost = "localhost" # Change to "nitter-redis" if using docker-compose
redisPort = 6379
redisPassword = ""
-redisConnections = 20 # connection pool size
+redisConnections = 20 # minimum open connections in pool
redisMaxConnections = 30
-# max, new connections are opened when none are available, but if the pool size
+# new connections are opened when none are available, but if the pool size
# goes above this, they're closed when released. don't worry about this unless
# you receive tons of requests per second
@ -23,15 +23,15 @@ redisMaxConnections = 30
hmacKey = "secretkey" # random key for cryptographic signing of video urls
base64Media = false # use base64 encoding for proxied media urls
enableRSS = true # set this to false to disable RSS feeds
-enableDebug = false # enable request logs and debug endpoints
+enableDebug = false # enable request logs and debug endpoints (/.tokens)
proxy = "" # http/https url, SOCKS proxies are not supported
proxyAuth = ""
tokenCount = 10
# minimum amount of usable tokens. tokens are used to authorize API requests,
-# but they expire after ~1 hour, and have a limit of 187 requests.
-# the limit gets reset every 15 minutes, and the pool is filled up so there's
-# always at least $tokenCount usable tokens. again, only increase this if
-# you receive major bursts all the time
+# but they expire after ~1 hour, and have a limit of 500 requests per endpoint.
+# the limits reset every 15 minutes, and the pool is filled up so there's
+# always at least `tokenCount` usable tokens. only increase this if you receive
+# major bursts all the time and don't have a rate limiting setup via e.g. nginx
#cookieHeader = "ct0=XXXXXXXXXXXXXXXXX; auth_token=XXXXXXXXXXXXXX" # authentication cookie of a logged in account, required for the likes tab and NSFW content
#xCsrfToken = "XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX" # required for the likes tab and NSFW content
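As a rough, back-of-the-envelope illustration of what these defaults allow (not part of the config itself): 10 tokens, each good for 500 requests per GraphQL endpoint every 15 minutes, gives about 5,000 requests per endpoint per window.

# Illustrative arithmetic only; the real accounting lives in the token pool
# module changed further down in this diff.
let
  tokenCount = 10     # from the config above
  perEndpoint = 500   # per-token limit per endpoint per 15-minute window
echo tokenCount * perEndpoint, " requests per endpoint per window"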

View file

@ -12,7 +12,7 @@ bin = @["nitter"]
requires "nim >= 1.4.8" requires "nim >= 1.4.8"
requires "jester#baca3f" requires "jester#baca3f"
requires "karax#9ee695b" requires "karax#5cf360c"
requires "sass#7dfdd03" requires "sass#7dfdd03"
requires "nimcrypto#4014ef9" requires "nimcrypto#4014ef9"
requires "markdown#158efe3" requires "markdown#158efe3"

View file

@ -8,64 +8,68 @@ import config
proc getGraphUser*(username: string): Future[User] {.async.} =
if username.len == 0: return
let
-variables = """{
-"screen_name": "$1",
-"withSafetyModeUserFields": false,
-"withSuperFollowsUserFields": false
-}""" % [username]
-js = await fetchRaw(graphUser ? {"variables": variables}, Api.userScreenName)
+variables = %*{"screen_name": username}
+params = {"variables": $variables, "features": gqlFeatures}
+js = await fetchRaw(graphUser ? params, Api.userScreenName)
result = parseGraphUser(js)
proc getGraphUserById*(id: string): Future[User] {.async.} =
if id.len == 0 or id.any(c => not c.isDigit): return
let
-variables = """{"userId": "$1", "withSuperFollowsUserFields": true}""" % [id]
-js = await fetchRaw(graphUserById ? {"variables": variables}, Api.userRestId)
+variables = %*{"userId": id}
+params = {"variables": $variables, "features": gqlFeatures}
+js = await fetchRaw(graphUserById ? params, Api.userRestId)
result = parseGraphUser(js)
+proc getGraphUserTweets*(id: string; kind: TimelineKind; after=""): Future[Timeline] {.async.} =
+if id.len == 0: return
+let
+cursor = if after.len > 0: "\"cursor\":\"$1\"," % after else: ""
+variables = userTweetsVariables % [id, cursor]
+params = {"variables": variables, "features": gqlFeatures}
+(url, apiId) = case kind
+of TimelineKind.tweets: (graphUserTweets, Api.userTweets)
+of TimelineKind.replies: (graphUserTweetsAndReplies, Api.userTweetsAndReplies)
+of TimelineKind.media: (graphUserMedia, Api.userMedia)
+js = await fetch(url ? params, apiId)
+result = parseGraphTimeline(js, "user", after)
+proc getGraphListTweets*(id: string; after=""): Future[Timeline] {.async.} =
+if id.len == 0: return
+let
+cursor = if after.len > 0: "\"cursor\":\"$1\"," % after else: ""
+variables = listTweetsVariables % [id, cursor]
+params = {"variables": variables, "features": gqlFeatures}
+js = await fetch(graphListTweets ? params, Api.listTweets)
+result = parseGraphTimeline(js, "list", after)
proc getGraphListBySlug*(name, list: string): Future[List] {.async.} =
let
-variables = %*{"screenName": name, "listSlug": list, "withHighlightedLabel": false}
-url = graphListBySlug ? {"variables": $variables}
-result = parseGraphList(await fetch(url, Api.listBySlug))
+variables = %*{"screenName": name, "listSlug": list}
+params = {"variables": $variables, "features": gqlFeatures}
+result = parseGraphList(await fetch(graphListBySlug ? params, Api.listBySlug))
proc getGraphList*(id: string): Future[List] {.async.} =
let
-variables = %*{"listId": id, "withHighlightedLabel": false}
-url = graphList ? {"variables": $variables}
-result = parseGraphList(await fetch(url, Api.list))
+variables = %*{"listId": id}
+params = {"variables": $variables, "features": gqlFeatures}
+result = parseGraphList(await fetch(graphListById ? params, Api.list))
proc getGraphListMembers*(list: List; after=""): Future[Result[User]] {.async.} =
if list.id.len == 0: return
var
variables = %*{
"listId": list.id,
-"withSuperFollowsUserFields": false,
"withBirdwatchPivots": false,
"withDownvotePerspective": false,
"withReactionsMetadata": false,
-"withReactionsPerspective": false,
-"withSuperFollowsTweetFields": false
+"withReactionsPerspective": false
}
if after.len > 0:
variables["cursor"] = % after
-let url = graphListMembers ? {"variables": $variables}
+let url = graphListMembers ? {"variables": $variables, "features": gqlFeatures}
result = parseGraphListMembers(await fetchRaw(url, Api.listMembers), after)
-proc getListTimeline*(id: string; after=""): Future[Timeline] {.async.} =
-if id.len == 0: return
-let
-ps = genParams({"list_id": id, "ranking_mode": "reverse_chronological"}, after)
-url = listTimeline ? ps
-result = parseTimeline(await fetch(url, Api.timeline), after)
-proc getTimeline*(id: string; after=""; replies=false): Future[Timeline] {.async.} =
-if id.len == 0: return
-let
-ps = genParams({"userId": id, "include_tweet_replies": $replies}, after)
-url = timeline / (id & ".json") ? ps
-result = parseTimeline(await fetch(url, Api.timeline), after)
proc getFavorites*(id: string; cfg: Config; after=""): Future[Timeline] {.async.} =
if id.len == 0: return
let
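All of the rewritten endpoints above follow the same pattern: a JSON "variables" object plus the shared gqlFeatures string are passed as URL query parameters. A minimal sketch of what that produces, assuming the graphUser and gqlFeatures constants defined later in this diff (the username is made up):

import std/[json, uri]

# Hypothetical illustration, not part of the patch.
let
  variables = %*{"screen_name": "nim_lang"}
  params = {"variables": $variables, "features": gqlFeatures}
  url = graphUser ? params
# $url ends up roughly as:
# https://api.twitter.com/graphql/.../UserByScreenName?variables=...&features=...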
@ -73,55 +77,20 @@ proc getFavorites*(id: string; cfg: Config; after=""): Future[Timeline] {.async.
url = consts.favorites / (id & ".json") ? ps
result = parseTimeline(await fetch(url, Api.favorites), after)
-proc getMediaTimeline*(id: string; after=""): Future[Timeline] {.async.} =
-if id.len == 0: return
-let url = mediaTimeline / (id & ".json") ? genParams(cursor=after)
-result = parseTimeline(await fetch(url, Api.timeline), after)
-proc getPhotoRail*(name: string): Future[PhotoRail] {.async.} =
-if name.len == 0: return
-let
-ps = genParams({"screen_name": name, "trim_user": "true"},
-count="18", ext=false)
-url = photoRail ? ps
-result = parsePhotoRail(await fetch(url, Api.timeline))
-proc getSearch*[T](query: Query; after=""): Future[Result[T]] {.async.} =
-let additional_headers = newHttpHeaders()
-when T is User:
-const
-searchMode = ("result_filter", "user")
-parse = parseUsers
-fetchFunc = fetchRaw
-if len(cfg.cookieHeader) != 0:
-additional_headers.add("Cookie", cfg.cookieHeader)
-if len(cfg.xCsrfToken) != 0:
-additional_headers.add("x-csrf-token", cfg.xCsrfToken)
-else:
-const
-searchMode = ("tweet_search_mode", "live")
-parse = parseTimeline
-fetchFunc = fetch
-let q = genQueryParam(query)
-if q.len == 0 or q == emptyQuery:
-return Result[T](beginning: true, query: query)
-let url = search ? genParams(searchParams & @[("q", q), searchMode], after)
-try:
-result = parse(await fetchFunc(url, Api.search, additional_headers), after)
-result.query = query
-except InternalError:
-return Result[T](beginning: true, query: query)
+proc getGraphTweetResult*(id: string): Future[Tweet] {.async.} =
+if id.len == 0: return
+let
+variables = tweetResultVariables % id
+params = {"variables": variables, "features": gqlFeatures}
+js = await fetch(graphTweetResult ? params, Api.tweetResult)
+result = parseGraphTweetResult(js)
proc getGraphTweet(id: string; after=""): Future[Conversation] {.async.} =
if id.len == 0: return
let
cursor = if after.len > 0: "\"cursor\":\"$1\"," % after else: ""
variables = tweetVariables % [id, cursor]
-params = {"variables": variables, "features": tweetFeatures}
+params = {"variables": variables, "features": gqlFeatures}
js = await fetch(graphTweet ? params, Api.tweetDetail)
result = parseGraphConversation(js, id)
@ -134,9 +103,51 @@ proc getTweet*(id: string; after=""): Future[Conversation] {.async.} =
if after.len > 0:
result.replies = await getReplies(id, after)
-proc getStatus*(id: string): Future[Tweet] {.async.} =
-let url = status / (id & ".json") ? genParams()
-result = parseStatus(await fetch(url, Api.status))
+proc getGraphSearch*(query: Query; after=""): Future[Result[Tweet]] {.async.} =
+let q = genQueryParam(query)
+if q.len == 0 or q == emptyQuery:
+return Result[Tweet](query: query, beginning: true)
+var
+variables = %*{
+"rawQuery": q,
+"count": 20,
+"product": "Latest",
+"withDownvotePerspective": false,
+"withReactionsMetadata": false,
+"withReactionsPerspective": false
+}
+if after.len > 0:
+variables["cursor"] = % after
+let url = graphSearchTimeline ? {"variables": $variables, "features": gqlFeatures}
+result = parseGraphSearch(await fetch(url, Api.search), after)
+result.query = query
+proc getUserSearch*(query: Query; page="1"): Future[Result[User]] {.async.} =
+if query.text.len == 0:
+return Result[User](query: query, beginning: true)
+var url = userSearch ? {
+"q": query.text,
+"skip_status": "1",
+"count": "20",
+"page": page
+}
+result = parseUsers(await fetchRaw(url, Api.userSearch))
+result.query = query
+if page.len == 0:
+result.bottom = "2"
+elif page.allCharsInSet(Digits):
+result.bottom = $(parseInt(page) + 1)
+proc getPhotoRail*(name: string): Future[PhotoRail] {.async.} =
+if name.len == 0: return
+let
+ps = genParams({"screen_name": name, "trim_user": "true"},
+count="18", ext=false)
+url = photoRail ? ps
+result = parsePhotoRail(await fetch(url, Api.timeline))
proc resolve*(url: string; prefs: Prefs): Future[string] {.async.} =
let client = newAsyncHttpClient(maxRedirects=0)
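Unlike the cursor-based GraphQL timelines, the new user search endpoint is paginated by page number, with result.bottom simply set to the next page. A hypothetical walk through two pages (sketch only, values made up):

import asyncdispatch

proc searchTwoPages(query: Query) {.async.} =
  # first page; users.bottom becomes "2"
  var users = await getUserSearch(query, "1")
  if users.bottom.len > 0:
    # second page; users.bottom becomes "3"
    users = await getUserSearch(query, users.bottom)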

View file

@ -18,8 +18,8 @@ proc genParams*(pars: openArray[(string, string)] = @[]; cursor="";
result &= p
if ext:
result &= ("ext", "mediaStats")
-result &= ("include_ext_alt_text", "true")
-result &= ("include_ext_media_availability", "true")
+result &= ("include_ext_alt_text", "1")
+result &= ("include_ext_media_availability", "1")
if count.len > 0:
result &= ("count", count)
if cursor.len > 0:
@ -45,7 +45,7 @@ proc genHeaders*(token: Token = nil): HttpHeaders =
})
template updateToken() =
-if api != Api.search and resp.headers.hasKey(rlRemaining):
+if resp.headers.hasKey(rlRemaining):
let
remaining = parseInt(resp.headers[rlRemaining])
reset = parseInt(resp.headers[rlReset])
@ -71,17 +71,12 @@ template fetchImpl(result, additional_headers, fetchBody) {.dirty.} =
getContent()
-# Twitter randomly returns 401 errors with an empty body quite often.
-# Retrying the request usually works.
-if resp.status == "401 Unauthorized" and result.len == 0:
-getContent()
if resp.status == $Http429:
raise rateLimitError()
if resp.status == $Http503:
badClient = true
-raise newException(InternalError, result)
+raise newException(BadClientError, "Bad client")
if result.len > 0:
if resp.headers.getOrDefault("content-encoding") == "gzip":
@ -97,6 +92,9 @@ template fetchImpl(result, additional_headers, fetchBody) {.dirty.} =
raise newException(InternalError, $url)
except InternalError as e:
raise e
+except BadClientError as e:
+release(token, used=true)
+raise e
except Exception as e:
echo "error: ", e.name, ", msg: ", e.msg, ", token: ", token[], ", url: ", url
if "length" notin e.msg and "descriptor" notin e.msg:

View file

@ -1,31 +1,31 @@
# SPDX-License-Identifier: AGPL-3.0-only
-import uri, sequtils
+import uri, sequtils, strutils
const
-auth* = "Bearer AAAAAAAAAAAAAAAAAAAAAPYXBAAAAAAACLXUNDekMxqa8h%2F40K4moUkGsoc%3DTYfbDKbT3jJPCEVnMYqilB28NHfOPqkca3qaAxGfsyKCs0wRbw"
+auth* = "Bearer AAAAAAAAAAAAAAAAAAAAANRILgAAAAAAnNwIzUejRCOuH5E6I8xnZz4puTs%3D1Zv7ttfk8LF81IUq16cHjhLTvJu4FA33AGWWjCpTnA"
api = parseUri("https://api.twitter.com")
activate* = $(api / "1.1/guest/activate.json")
-userShow* = api / "1.1/users/show.json"
photoRail* = api / "1.1/statuses/media_timeline.json"
-status* = api / "1.1/statuses/show"
-search* = api / "2/search/adaptive.json"
timelineApi = api / "2/timeline"
-timeline* = timelineApi / "profile"
-mediaTimeline* = timelineApi / "media"
favorites* = timelineApi / "favorites"
-listTimeline* = timelineApi / "list.json"
-tweet* = timelineApi / "conversation"
+userSearch* = api / "1.1/users/search.json"
graphql = api / "graphql"
-graphTweet* = graphql / "6lWNh96EXDJCXl05SAtn_g/TweetDetail"
-graphUser* = graphql / "7mjxD3-C6BxitPMVQ6w0-Q/UserByScreenName"
-graphUserById* = graphql / "I5nvpI91ljifos1Y3Lltyg/UserByRestId"
-graphList* = graphql / "JADTh6cjebfgetzvF3tQvQ/List"
-graphListBySlug* = graphql / "ErWsz9cObLel1BF-HjuBlA/ListBySlug"
-graphListMembers* = graphql / "Ke6urWMeCV2UlKXGRy4sow/ListMembers"
+graphUser* = graphql / "8mPfHBetXOg-EHAyeVxUoA/UserByScreenName"
+graphUserById* = graphql / "nI8WydSd-X-lQIVo6bdktQ/UserByRestId"
+graphUserTweets* = graphql / "9rys0A7w1EyqVd2ME0QCJg/UserTweets"
+graphUserTweetsAndReplies* = graphql / "ehMCHF3Mkgjsfz_aImqOsg/UserTweetsAndReplies"
+graphUserMedia* = graphql / "MA_EP2a21zpzNWKRkaPBMg/UserMedia"
+graphTweet* = graphql / "6I7Hm635Q6ftv69L8VrSeQ/TweetDetail"
+graphTweetResult* = graphql / "rt-rHeSJ-2H9O9gxWQcPcg/TweetResultByRestId"
+graphSearchTimeline* = graphql / "gkjsKepM6gl_HmFWoWKfgg/SearchTimeline"
+graphListById* = graphql / "iTpgCtbdxrsJfyx0cFjHqg/ListByRestId"
+graphListBySlug* = graphql / "-kmqNvm5Y-cVrfvBy6docg/ListBySlug"
+graphListMembers* = graphql / "P4NpVZDqUD_7MEM84L-8nw/ListMembers"
+graphListTweets* = graphql / "jZntL0oVJSdjhmPcdbw_eA/ListLatestTweetsTimeline"
timelineParams* = {
"include_profile_interstitial_type": "0",
@ -36,58 +36,85 @@ const
"include_mute_edge": "0", "include_mute_edge": "0",
"include_can_dm": "0", "include_can_dm": "0",
"include_can_media_tag": "1", "include_can_media_tag": "1",
"include_ext_is_blue_verified": "1",
"skip_status": "1", "skip_status": "1",
"cards_platform": "Web-12", "cards_platform": "Web-12",
"include_cards": "1", "include_cards": "1",
"include_composer_source": "false", "include_composer_source": "0",
"include_reply_count": "1", "include_reply_count": "1",
"tweet_mode": "extended", "tweet_mode": "extended",
"include_entities": "true", "include_entities": "1",
"include_user_entities": "true", "include_user_entities": "1",
"include_ext_media_color": "false", "include_ext_media_color": "0",
"send_error_codes": "true", "send_error_codes": "1",
"simple_quoted_tweet": "true", "simple_quoted_tweet": "1",
"include_quote_count": "true" "include_quote_count": "1"
}.toSeq }.toSeq
searchParams* = { gqlFeatures* = """{
"query_source": "typed_query", "blue_business_profile_image_shape_enabled": false,
"pc": "1", "freedom_of_speech_not_reach_fetch_enabled": false,
"spelling_corrections": "1" "graphql_is_translatable_rweb_tweet_is_translatable_enabled": false,
}.toSeq "interactive_text_enabled": false,
## top: nothing "longform_notetweets_consumption_enabled": true,
## latest: "tweet_search_mode: live" "longform_notetweets_richtext_consumption_enabled": true,
## user: "result_filter: user" "longform_notetweets_rich_text_read_enabled": false,
## photos: "result_filter: photos" "responsive_web_edit_tweet_api_enabled": false,
## videos: "result_filter: videos" "responsive_web_enhance_cards_enabled": false,
"responsive_web_graphql_exclude_directive_enabled": true,
"responsive_web_graphql_skip_user_profile_image_extensions_enabled": false,
"responsive_web_graphql_timeline_navigation_enabled": false,
"responsive_web_text_conversations_enabled": false,
"responsive_web_twitter_blue_verified_badge_is_enabled": true,
"spaces_2022_h2_clipping": true,
"spaces_2022_h2_spaces_communities": true,
"standardized_nudges_misinfo": false,
"tweet_awards_web_tipping_enabled": false,
"tweet_with_visibility_results_prefer_gql_limited_actions_policy_enabled": false,
"tweetypie_unmention_optimization_enabled": false,
"verified_phone_label_enabled": false,
"vibe_api_enabled": false,
"view_counts_everywhere_api_enabled": false
}""".replace(" ", "").replace("\n", "")
tweetVariables* = """{ tweetVariables* = """{
"focalTweetId": "$1", "focalTweetId": "$1",
$2 $2
"includePromotedContent": false,
"withBirdwatchNotes": false, "withBirdwatchNotes": false,
"includePromotedContent": false,
"withDownvotePerspective": false,
"withReactionsMetadata": false,
"withReactionsPerspective": false,
"withVoice": false
}"""
tweetResultVariables* = """{
"tweetId": "$1",
"includePromotedContent": false,
"withDownvotePerspective": false,
"withReactionsMetadata": false,
"withReactionsPerspective": false,
"withVoice": false,
"withCommunity": false
}"""
userTweetsVariables* = """{
"userId": "$1", $2
"count": 20,
"includePromotedContent": false,
"withDownvotePerspective": false, "withDownvotePerspective": false,
"withReactionsMetadata": false, "withReactionsMetadata": false,
"withReactionsPerspective": false, "withReactionsPerspective": false,
"withSuperFollowsTweetFields": false,
"withSuperFollowsUserFields": false,
"withVoice": false, "withVoice": false,
"withV2Timeline": true "withV2Timeline": true
}""" }"""
tweetFeatures* = """{ listTweetsVariables* = """{
"graphql_is_translatable_rweb_tweet_is_translatable_enabled": false, "listId": "$1", $2
"responsive_web_graphql_timeline_navigation_enabled": false, "count": 20,
"standardized_nudges_misinfo": false, "includePromotedContent": false,
"verified_phone_label_enabled": false, "withDownvotePerspective": false,
"responsive_web_twitter_blue_verified_badge_is_enabled": false, "withReactionsMetadata": false,
"tweet_with_visibility_results_prefer_gql_limited_actions_policy_enabled": false, "withReactionsPerspective": false,
"view_counts_everywhere_api_enabled": false, "withVoice": false
"responsive_web_edit_tweet_api_enabled": false,
"tweetypie_unmention_optimization_enabled": false,
"vibe_api_enabled": false,
"longform_notetweets_consumption_enabled": true,
"responsive_web_text_conversations_enabled": false,
"responsive_web_enhance_cards_enabled": false,
"interactive_text_enabled": false
}""" }"""

View file

@ -1,2 +1,2 @@
-import parser/[user, graphql, timeline]
-export user, graphql, timeline
+import parser/[user, graphql]
+export user, graphql

View file

@ -11,6 +11,7 @@ proc parseGraphUser*(json: string): User =
result = toUser raw.data.user.result.legacy
result.id = raw.data.user.result.restId
+result.verified = result.verified or raw.data.user.result.isBlueVerified
proc parseGraphListMembers*(json, cursor: string): Result[User] =
result = Result[User](

View file

@ -1,30 +0,0 @@
import std/[strutils, tables]
import jsony
import user, ../types/timeline
from ../../types import Result, User
proc getId(id: string): string {.inline.} =
let start = id.rfind("-")
if start < 0: return id
id[start + 1 ..< id.len]
proc parseUsers*(json: string; after=""): Result[User] =
result = Result[User](beginning: after.len == 0)
let raw = json.fromJson(Search)
if raw.timeline.instructions.len == 0:
return
for i in raw.timeline.instructions:
if i.addEntries.entries.len > 0:
for e in i.addEntries.entries:
let id = e.entryId.getId
if e.entryId.startsWith("user"):
if id in raw.globalObjects.users:
result.content.add toUser raw.globalObjects.users[id]
elif e.entryId.startsWith("cursor"):
let cursor = e.content.operation.cursor
if cursor.cursorType == "Top":
result.top = cursor.value
elif cursor.cursorType == "Bottom":
result.bottom = cursor.value

View file

@ -84,6 +84,8 @@ proc parseUnifiedCard*(json: string): Card =
component.parseMedia(card, result)
of buttonGroup:
discard
+of ComponentType.hidden:
+result.kind = CardKind.hidden
of ComponentType.unknown:
echo "ERROR: Unknown component type: ", json

View file

@ -2,7 +2,7 @@ import std/[algorithm, unicode, re, strutils, strformat, options, nre]
import jsony
import utils, slices
import ../types/user as userType
-from ../../types import User, Error
+from ../../types import Result, User, Error
let
unRegex = re.re"(^|[^A-z0-9-_./?])@([A-z0-9_]{1,15})"
@ -76,3 +76,12 @@ proc parseUser*(json: string; username=""): User =
else: echo "[error - parseUser]: ", error
result = toUser json.fromJson(RawUser)
+proc parseUsers*(json: string; after=""): Result[User] =
+result = Result[User](beginning: after.len == 0)
+# starting with '{' means it's an error
+if json[0] == '[':
+let raw = json.fromJson(seq[RawUser])
+for user in raw:
+result.content.add user.toUser

View file

@ -11,4 +11,5 @@ type
UserResult = object
legacy*: RawUser
restId*: string
+isBlueVerified*: bool
reason*: Option[string]

View file

@ -17,6 +17,7 @@ type
twitterListDetails
communityDetails
mediaWithDetailsHorizontal
+hidden
unknown
Component* = object
@ -71,11 +72,11 @@ type
Text = object
content: string
-HasTypeField = Component | Destination | MediaEntity | AppStoreData
+TypeField = Component | Destination | MediaEntity | AppStoreData
converter fromText*(text: Text): string = text.content
-proc renameHook*(v: var HasTypeField; fieldName: var string) =
+proc renameHook*(v: var TypeField; fieldName: var string) =
if fieldName == "type":
fieldName = "kind"
@ -89,6 +90,7 @@ proc enumHook*(s: string; v: var ComponentType) =
of "twitter_list_details": twitterListDetails of "twitter_list_details": twitterListDetails
of "community_details": communityDetails of "community_details": communityDetails
of "media_with_details_horizontal": mediaWithDetailsHorizontal of "media_with_details_horizontal": mediaWithDetailsHorizontal
of "commerce_drop_details": hidden
else: echo "ERROR: Unknown enum value (ComponentType): ", s; unknown else: echo "ERROR: Unknown enum value (ComponentType): ", s; unknown
proc enumHook*(s: string; v: var AppType) = proc enumHook*(s: string; v: var AppType) =

View file

@ -42,5 +42,11 @@ template use*(pool: HttpPool; heads: HttpHeaders; body: untyped): untyped =
except ProtocolError:
# Twitter closed the connection, retry
body
+except BadClientError:
+# Twitter returned 503, we need a new client
+pool.release(c, true)
+badClient = false
+c = pool.acquire(heads)
+body
finally:
pool.release(c, badClient)
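The new branch pairs with the BadClientError raised in the fetch code above: on a 503 the current client is dropped, a fresh one is acquired, and the body runs again. A rough sketch of how a call site uses the template (not taken verbatim from the patch; c is the client symbol the template injects, and genHeaders/token/url are assumed from the fetch code):

pool.use(genHeaders(token)):
  let resp = await c.get($url)
  result = await resp.readBody()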

View file

@ -81,19 +81,23 @@ routes:
resp Http500, showError(
&"An error occurred, please {link} with the URL you tried to visit.", cfg)
+error BadClientError:
+echo error.exc.name, ": ", error.exc.msg
+resp Http500, showError("Network error occured, please try again.", cfg)
error RateLimitError:
const link = a("another instance", href = instancesUrl)
resp Http429, showError(
&"Instance has been rate limited.<br>Use {link} or try again later.", cfg)
+extend unsupported, ""
+extend preferences, ""
+extend resolver, ""
extend rss, ""
-extend status, ""
extend search, ""
extend timeline, ""
+extend list, ""
+extend status, ""
extend media, ""
-extend list, ""
-extend preferences, ""
-extend resolver, ""
extend embed, ""
extend debug, ""
-extend unsupported, ""

View file

@ -4,6 +4,8 @@ import packedjson, packedjson/deserialiser
import types, parserutils, utils
import experimental/parser/unifiedcard
+proc parseGraphTweet(js: JsonNode): Tweet
proc parseUser(js: JsonNode; id=""): User =
if js.isNull: return
result = User(
@ -19,13 +21,20 @@ proc parseUser(js: JsonNode; id=""): User =
tweets: js{"statuses_count"}.getInt,
likes: js{"favourites_count"}.getInt,
media: js{"media_count"}.getInt,
-verified: js{"verified"}.getBool,
+verified: js{"verified"}.getBool or js{"ext_is_blue_verified"}.getBool,
protected: js{"protected"}.getBool,
joinDate: js{"created_at"}.getTime
)
result.expandUserEntities(js)
+proc parseGraphUser(js: JsonNode): User =
+let user = ? js{"user_results", "result"}
+result = parseUser(user{"legacy"})
+if "is_blue_verified" in user:
+result.verified = true
proc parseGraphList*(js: JsonNode): List =
if js.isNull: return
@ -38,11 +47,11 @@ proc parseGraphList*(js: JsonNode): List =
result = List(
id: list{"id_str"}.getStr,
name: list{"name"}.getStr,
-username: list{"user", "legacy", "screen_name"}.getStr,
-userId: list{"user", "rest_id"}.getStr,
+username: list{"user_results", "result", "legacy", "screen_name"}.getStr,
+userId: list{"user_results", "result", "rest_id"}.getStr,
description: list{"description"}.getStr,
members: list{"member_count"}.getInt,
-banner: list{"custom_banner_media", "media_info", "url"}.getImageStr
+banner: list{"custom_banner_media", "media_info", "original_img_url"}.getImageStr
)
proc parsePoll(js: JsonNode): Poll =
@ -213,10 +222,18 @@ proc parseTweet(js: JsonNode; jsCard: JsonNode = newJNull()): Tweet =
if js{"is_quote_status"}.getBool: if js{"is_quote_status"}.getBool:
result.quote = some Tweet(id: js{"quoted_status_id_str"}.getId) result.quote = some Tweet(id: js{"quoted_status_id_str"}.getId)
# legacy
with rt, js{"retweeted_status_id_str"}: with rt, js{"retweeted_status_id_str"}:
result.retweet = some Tweet(id: rt.getId) result.retweet = some Tweet(id: rt.getId)
return return
# graphql
with rt, js{"retweeted_status_result", "result"}:
# needed due to weird edgecase where the actual tweet data isn't included
if "legacy" in rt:
result.retweet = some parseGraphTweet(rt)
return
if jsCard.kind != JNull: if jsCard.kind != JNull:
let name = jsCard{"name"}.getStr let name = jsCard{"name"}.getStr
if "poll" in name: if "poll" in name:
@ -237,7 +254,10 @@ proc parseTweet(js: JsonNode; jsCard: JsonNode = newJNull()): Tweet =
of "video": of "video":
result.video = some(parseVideo(m)) result.video = some(parseVideo(m))
with user, m{"additional_media_info", "source_user"}: with user, m{"additional_media_info", "source_user"}:
if user{"id"}.getInt > 0:
result.attribution = some(parseUser(user)) result.attribution = some(parseUser(user))
else:
result.attribution = some(parseGraphUser(user))
of "animated_gif": of "animated_gif":
result.gif = some(parseGif(m)) result.gif = some(parseGif(m))
else: discard else: discard
@ -299,19 +319,6 @@ proc parseGlobalObjects(js: JsonNode): GlobalObjects =
tweet.user = result.users[tweet.user.id]
result.tweets[k] = tweet
-proc parseStatus*(js: JsonNode): Tweet =
-with e, js{"errors"}:
-if e.getError in {tweetNotFound, tweetUnavailable, tweetCensored, doesntExist,
-tweetNotAuthorized, suspended}:
-return
-result = parseTweet(js, js{"card"})
-if not result.isNil:
-result.user = parseUser(js{"user"})
-with quote, js{"quoted_status"}:
-result.quote = some parseStatus(js{"quoted_status"})
proc parseInstructions[T](res: var Result[T]; global: GlobalObjects; js: JsonNode) =
if js.kind != JArray or js.len == 0:
return
@ -352,7 +359,7 @@ proc parseTimeline*(js: JsonNode; after=""): Timeline =
result.top = e.getCursor
elif "cursor-bottom" in entry:
result.bottom = e.getCursor
-elif entry.startsWith("sq-C"):
+elif entry.startsWith("sq-cursor"):
with cursor, e{"content", "operation", "cursor"}:
if cursor{"cursorType"}.getStr == "Bottom":
result.bottom = cursor{"value"}.getStr
@ -373,9 +380,20 @@ proc parsePhotoRail*(js: JsonNode): PhotoRail =
result.add GalleryPhoto(url: url, tweetId: $t.id)
proc parseGraphTweet(js: JsonNode): Tweet =
-if js.kind == JNull or js{"__typename"}.getStr == "TweetUnavailable":
+if js.kind == JNull:
return Tweet(available: false)
+case js{"__typename"}.getStr
+of "TweetUnavailable":
+return Tweet(available: false)
+of "TweetTombstone":
+return Tweet(
+available: false,
+text: js{"tombstone", "text"}.getTombstone
+)
+of "TweetWithVisibilityResults":
+return parseGraphTweet(js{"tweet"})
var jsCard = copy(js{"card", "legacy"})
if jsCard.kind != JNull:
var values = newJObject()
@ -384,7 +402,7 @@ proc parseGraphTweet(js: JsonNode): Tweet =
jsCard["binding_values"] = values jsCard["binding_values"] = values
result = parseTweet(js{"legacy"}, jsCard) result = parseTweet(js{"legacy"}, jsCard)
result.user = parseUser(js{"core", "user_results", "result", "legacy"}) result.user = parseGraphUser(js{"core"})
with noteTweet, js{"note_tweet", "note_tweet_results", "result"}: with noteTweet, js{"note_tweet", "note_tweet_results", "result"}:
result.expandNoteTweetEntities(noteTweet) result.expandNoteTweetEntities(noteTweet)
@ -407,10 +425,14 @@ proc parseGraphThread(js: JsonNode): tuple[thread: Chain; self: bool] =
if t{"item", "itemContent", "tweetDisplayType"}.getStr == "SelfThread": if t{"item", "itemContent", "tweetDisplayType"}.getStr == "SelfThread":
result.self = true result.self = true
proc parseGraphTweetResult*(js: JsonNode): Tweet =
with tweet, js{"data", "tweetResult", "result"}:
result = parseGraphTweet(tweet)
proc parseGraphConversation*(js: JsonNode; tweetId: string): Conversation = proc parseGraphConversation*(js: JsonNode; tweetId: string): Conversation =
result = Conversation(replies: Result[Chain](beginning: true)) result = Conversation(replies: Result[Chain](beginning: true))
let instructions = ? js{"data", "threaded_conversation_with_injections_v2", "instructions"} let instructions = ? js{"data", "threaded_conversation_with_injections", "instructions"}
if instructions.len == 0: if instructions.len == 0:
return return
@ -418,7 +440,8 @@ proc parseGraphConversation*(js: JsonNode; tweetId: string): Conversation =
let entryId = e{"entryId"}.getStr
# echo entryId
if entryId.startsWith("tweet"):
-let tweet = parseGraphTweet(e{"content", "itemContent", "tweet_results", "result"})
+with tweetResult, e{"content", "itemContent", "tweet_results", "result"}:
+let tweet = parseGraphTweet(tweetResult)
if not tweet.available:
tweet.id = parseBiggestInt(entryId.getId())
@ -427,6 +450,18 @@ proc parseGraphConversation*(js: JsonNode; tweetId: string): Conversation =
result.tweet = tweet
else:
result.before.content.add tweet
+elif entryId.startsWith("tombstone"):
+let id = entryId.getId()
+let tweet = Tweet(
+id: parseBiggestInt(id),
+available: false,
+text: e{"content", "itemContent", "tombstoneInfo", "richText"}.getTombstone
+)
+if id == tweetId:
+result.tweet = tweet
+else:
+result.before.content.add tweet
elif entryId.startsWith("conversationthread"):
let (thread, self) = parseGraphThread(e)
if self:
@ -435,3 +470,54 @@ proc parseGraphConversation*(js: JsonNode; tweetId: string): Conversation =
result.replies.content.add thread
elif entryId.startsWith("cursor-bottom"):
result.replies.bottom = e{"content", "itemContent", "value"}.getStr
proc parseGraphTimeline*(js: JsonNode; root: string; after=""): Timeline =
result = Timeline(beginning: after.len == 0)
let instructions =
if root == "list": ? js{"data", "list", "tweets_timeline", "timeline", "instructions"}
else: ? js{"data", "user", "result", "timeline_v2", "timeline", "instructions"}
if instructions.len == 0:
return
for i in instructions:
if i{"type"}.getStr == "TimelineAddEntries":
for e in i{"entries"}:
let entryId = e{"entryId"}.getStr
if entryId.startsWith("tweet"):
with tweetResult, e{"content", "itemContent", "tweet_results", "result"}:
let tweet = parseGraphTweet(tweetResult)
if not tweet.available:
tweet.id = parseBiggestInt(entryId.getId())
result.content.add tweet
elif entryId.startsWith("profile-conversation") or entryId.startsWith("homeConversation"):
let (thread, self) = parseGraphThread(e)
for tweet in thread.content:
result.content.add tweet
elif entryId.startsWith("cursor-bottom"):
result.bottom = e{"content", "value"}.getStr
proc parseGraphSearch*(js: JsonNode; after=""): Timeline =
result = Timeline(beginning: after.len == 0)
let instructions = js{"data", "search_by_raw_query", "search_timeline", "timeline", "instructions"}
if instructions.len == 0:
return
for instruction in instructions:
let typ = instruction{"type"}.getStr
if typ == "TimelineAddEntries":
for e in instructions[0]{"entries"}:
let entryId = e{"entryId"}.getStr
if entryId.startsWith("tweet"):
with tweetResult, e{"content", "itemContent", "tweet_results", "result"}:
let tweet = parseGraphTweet(tweetResult)
if not tweet.available:
tweet.id = parseBiggestInt(entryId.getId())
result.content.add tweet
elif entryId.startsWith("cursor-bottom"):
result.bottom = e{"content", "value"}.getStr
elif typ == "TimelineReplaceEntry":
if instruction{"entry_id_to_replace"}.getStr.startsWith("cursor-bottom"):
result.bottom = instruction{"entry", "content", "value"}.getStr

View file

@ -130,7 +130,7 @@ proc getBanner*(js: JsonNode): string =
return
proc getTombstone*(js: JsonNode): string =
-result = js{"tombstoneInfo", "richText", "text"}.getStr
+result = js{"text"}.getStr
result.removeSuffix(" Learn more")
proc getMp4Resolution*(url: string): int =

View file

@ -153,7 +153,7 @@ proc getCachedTweet*(id: int64): Future[Tweet] {.async.} =
if tweet != redisNil:
tweet.deserialize(Tweet)
else:
-result = await getStatus($id)
+result = await getGraphTweetResult($id)
if not result.isNil:
await cache(result)

View file

@ -6,7 +6,6 @@ import jester
import router_utils
import ".."/[types, redis_cache, api]
import ../views/[general, timeline, list]
-export getListTimeline, getGraphList
template respList*(list, timeline, title, vnode: typed) =
if list.id.len == 0 or list.name.len == 0:
@ -39,7 +38,7 @@ proc createListRouter*(cfg: Config) =
let
prefs = cookiePrefs()
list = await getCachedList(id=(@"id"))
-timeline = await getListTimeline(list.id, getCursor())
+timeline = await getGraphListTweets(list.id, getCursor())
vnode = renderTimelineTweets(timeline, prefs, request.path)
respList(list, timeline, list.title, vnode)

View file

@ -28,7 +28,7 @@ proc timelineRss*(req: Request; cfg: Config; query: Query): Future[Rss] {.async.
var q = query
q.fromUser = names
profile = Profile(
-tweets: await getSearch[Tweet](q, after),
+tweets: await getGraphSearch(q, after),
# this is kinda dumb
user: User(
username: name,
@ -78,7 +78,7 @@ proc createRssRouter*(cfg: Config) =
if rss.cursor.len > 0:
respRss(rss, "Search")
-let tweets = await getSearch[Tweet](query, cursor)
+let tweets = await getGraphSearch(query, cursor)
rss.cursor = tweets.bottom
rss.feed = renderSearchRss(tweets.content, query.text, genQueryUrl(query), cfg)
@ -160,7 +160,7 @@ proc createRssRouter*(cfg: Config) =
let
list = await getCachedList(id=id)
-timeline = await getListTimeline(list.id, cursor)
+timeline = await getGraphListTweets(list.id, cursor)
rss.cursor = timeline.bottom
rss.feed = renderListRss(timeline.content, list, cfg)

View file

@ -27,11 +27,15 @@ proc createSearchRouter*(cfg: Config) =
of users:
if "," in q:
redirect("/" & q)
-let users = await getSearch[User](query, getCursor())
+var users: Result[User]
+try:
+users = await getUserSearch(query, getCursor())
+except InternalError:
+users = Result[User](beginning: true, query: query)
resp renderMain(renderUserSearch(users, prefs), request, cfg, prefs, title)
of tweets:
let
-tweets = await getSearch[Tweet](query, getCursor())
+tweets = await getGraphSearch(query, getCursor())
rss = "/search/rss?" & genQueryUrl(query)
resp renderMain(renderTweetSearch(tweets, cfg, prefs, getPath()),
request, cfg, prefs, title, rss=rss)

View file

@ -16,17 +16,21 @@ proc createStatusRouter*(cfg: Config) =
router status:
get "/@name/status/@id/?":
cond '.' notin @"name"
-cond not @"id".any(c => not c.isDigit)
+let id = @"id"
+if id.len > 19 or id.any(c => not c.isDigit):
+resp Http404, showError("Invalid tweet ID", cfg)
let prefs = cookiePrefs()
# used for the infinite scroll feature
if @"scroll".len > 0:
-let replies = await getReplies(@"id", getCursor())
+let replies = await getReplies(id, getCursor())
if replies.content.len == 0:
resp Http404, ""
resp $renderReplies(replies, prefs, getPath())
-let conv = await getTweet(@"id", getCursor())
+let conv = await getTweet(id, getCursor())
if conv == nil:
echo "nil conv"

View file

@ -48,11 +48,11 @@ proc fetchProfile*(after: string; query: Query; cfg: Config; skipRail=false;
let
timeline =
case query.kind
-of posts: getTimeline(userId, after)
-of replies: getTimeline(userId, after, replies=true)
-of media: getMediaTimeline(userId, after)
+of posts: getGraphUserTweets(userId, TimelineKind.tweets, after)
+of replies: getGraphUserTweets(userId, TimelineKind.replies, after)
+of media: getGraphUserTweets(userId, TimelineKind.media, after)
of favorites: getFavorites(userId, cfg, after)
-else: getSearch[Tweet](query, after)
+else: getGraphSearch(query, after)
rail =
skipIf(skipRail or query.kind == media, @[]):
@ -66,6 +66,7 @@ proc fetchProfile*(after: string; query: Query; cfg: Config; skipRail=false;
let tweet = await getCachedTweet(user.pinnedTweet)
if not tweet.isNil:
tweet.pinned = true
+tweet.user = user
pinned = some tweet
result = Profile(
@ -84,7 +85,7 @@ proc showTimeline*(request: Request; query: Query; cfg: Config; prefs: Prefs;
rss, after: string): Future[string] {.async.} =
if query.fromUser.len != 1:
let
-timeline = await getSearch[Tweet](query, after)
+timeline = await getGraphSearch(query, after)
html = renderTweetSearch(timeline, cfg, prefs, getPath())
return renderMain(html, request, cfg, prefs, "Multi", rss=rss)
@ -125,7 +126,7 @@ proc createTimelineRouter*(cfg: Config) =
get "/@name/?@tab?/?": get "/@name/?@tab?/?":
cond '.' notin @"name" cond '.' notin @"name"
cond @"name" notin ["pic", "gif", "video"] cond @"name" notin ["pic", "gif", "video", "search", "settings", "login", "intent", "i"]
cond @"tab" in ["with_replies", "media", "search", "favorites", ""] cond @"tab" in ["with_replies", "media", "search", "favorites", ""]
let let
prefs = cookiePrefs() prefs = cookiePrefs()
@ -139,7 +140,7 @@ proc createTimelineRouter*(cfg: Config) =
# used for the infinite scroll feature
if @"scroll".len > 0:
if query.fromUser.len != 1:
-var timeline = await getSearch[Tweet](query, after)
+var timeline = await getGraphSearch(query, after)
if timeline.content.len == 0: resp Http404
timeline.beginning = true
resp $renderTweetSearch(timeline, cfg, prefs, getPath())

View file

@ -1,8 +1,7 @@
# SPDX-License-Identifier: AGPL-3.0-only
import asyncdispatch, httpclient, times, sequtils, json, random
import strutils, tables
-import zippy
-import types, consts, http_pool
+import types, consts
const
maxConcurrentReqs = 5 # max requests at a time per token, to avoid race conditions
@ -11,11 +10,12 @@ const
failDelay = initDuration(minutes=30)
var
-clientPool: HttpPool
tokenPool: seq[Token]
lastFailed: Time
enableLogging = false
+let headers = newHttpHeaders({"authorization": auth})
template log(str) =
if enableLogging: echo "[tokens] ", str
@ -41,9 +41,12 @@ proc getPoolJson*(): JsonNode =
let
maxReqs =
case api
-of Api.listMembers, Api.listBySlug, Api.list,
-Api.userRestId, Api.userScreenName, Api.tweetDetail: 500
of Api.timeline: 187
+of Api.listMembers, Api.listBySlug, Api.list, Api.listTweets,
+Api.userTweets, Api.userTweetsAndReplies, Api.userMedia,
+Api.userRestId, Api.userScreenName,
+Api.tweetDetail, Api.tweetResult, Api.search: 500
+of Api.userSearch: 900
else: 180
reqs = maxReqs - token.apis[api].remaining
@ -65,18 +68,12 @@ proc fetchToken(): Future[Token] {.async.} =
if getTime() - lastFailed < failDelay:
raise rateLimitError()
-let headers = newHttpHeaders({
-"accept": "text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,*/*;q=0.8",
-"accept-encoding": "gzip",
-"accept-language": "en-US,en;q=0.5",
-"connection": "keep-alive",
-"authorization": auth
-})
+let client = newAsyncHttpClient(headers=headers)
try:
let
-resp = clientPool.use(headers): await c.postContent(activate)
-tokNode = parseJson(uncompress(resp))["guest_token"]
+resp = await client.postContent(activate)
+tokNode = parseJson(resp)["guest_token"]
tok = tokNode.getStr($(tokNode.getInt))
time = getTime()
@ -86,6 +83,8 @@ proc fetchToken(): Future[Token] {.async.} =
if "Try again" notin e.msg: if "Try again" notin e.msg:
echo "[tokens] fetching tokens paused, resuming in 30 minutes" echo "[tokens] fetching tokens paused, resuming in 30 minutes"
lastFailed = getTime() lastFailed = getTime()
finally:
client.close()
proc expired(token: Token): bool = proc expired(token: Token): bool =
let time = getTime() let time = getTime()
@ -158,7 +157,6 @@ proc poolTokens*(amount: int) {.async.} =
tokenPool.add newToken
proc initTokenPool*(cfg: Config) {.async.} =
-clientPool = HttpPool()
enableLogging = cfg.enableDebug
while true:

View file

@ -7,20 +7,29 @@ genPrefsType()
type
RateLimitError* = object of CatchableError
InternalError* = object of CatchableError
+BadClientError* = object of CatchableError
+TimelineKind* {.pure.} = enum
+tweets
+replies
+media
Api* {.pure.} = enum
tweetDetail
-userShow
+tweetResult
timeline
search
-tweet
+userSearch
list
listBySlug
listMembers
+listTweets
userRestId
userScreenName
-status
favorites
+userTweets
+userTweetsAndReplies
+userMedia
RateLimit* = object
remaining*: int
@ -37,8 +46,10 @@ type
null = 0
noUserMatches = 17
protectedUser = 22
+missingParams = 25
couldntAuth = 32
doesntExist = 34
+invalidParam = 47
userNotFound = 50
suspended = 63
rateLimited = 88
@ -151,6 +162,7 @@ type
imageDirectMessage = "image_direct_message"
audiospace = "audiospace"
newsletterPublication = "newsletter_publication"
+hidden
unknown
Card* = object

View file

@ -59,8 +59,7 @@ proc buttonReferer*(action, text, path: string; class=""; `method`="post"): VNod
proc genCheckbox*(pref, label: string; state: bool): VNode =
buildHtml(label(class="pref-group checkbox-container")):
text label
-if state: input(name=pref, `type`="checkbox", checked="")
-else: input(name=pref, `type`="checkbox")
+input(name=pref, `type`="checkbox", checked=state)
span(class="checkbox")
proc genInput*(pref, label, state, placeholder: string; class=""; autofocus=true): VNode =
@ -68,20 +67,15 @@ proc genInput*(pref, label, state, placeholder: string; class=""; autofocus=true
buildHtml(tdiv(class=("pref-group pref-input " & class))):
if label.len > 0:
label(`for`=pref): text label
-if autofocus and state.len == 0:
-input(name=pref, `type`="text", placeholder=p, value=state, autofocus="")
-else:
-input(name=pref, `type`="text", placeholder=p, value=state)
+input(name=pref, `type`="text", placeholder=p, value=state, autofocus=(autofocus and state.len == 0))
proc genSelect*(pref, label, state: string; options: seq[string]): VNode =
buildHtml(tdiv(class="pref-group pref-input")):
label(`for`=pref): text label
select(name=pref):
for opt in options:
-if opt == state:
-option(value=opt, selected=""): text opt
-else:
-option(value=opt): text opt
+option(value=opt, selected=(opt == state)):
+text opt
proc genDate*(pref, state: string): VNode =
buildHtml(span(class="date-input")):
@ -93,12 +87,9 @@ proc genImg*(url: string; class=""): VNode =
img(src=getPicUrl(url), class=class, alt="")
proc getTabClass*(query: Query; tab: QueryKind): string =
-result = "tab-item"
-if query.kind == tab:
-result &= " active"
+if query.kind == tab: "tab-item active"
+else: "tab-item"
proc getAvatarClass*(prefs: Prefs): string =
-if prefs.squareAvatars:
-"avatar"
-else:
-"avatar round"
+if prefs.squareAvatars: "avatar"
+else: "avatar round"

View file

@ -66,12 +66,10 @@ proc renderSearchPanel*(query: Query): VNode =
hiddenField("f", "tweets")
genInput("q", "", query.text, "Enter search...", class="pref-inline")
button(`type`="submit"): icon "search"
-if isPanelOpen(query):
-input(id="search-panel-toggle", `type`="checkbox", checked="")
-else:
-input(id="search-panel-toggle", `type`="checkbox")
-label(`for`="search-panel-toggle"):
-icon "down"
+input(id="search-panel-toggle", `type`="checkbox", checked=isPanelOpen(query))
+label(`for`="search-panel-toggle"): icon "down"
tdiv(class="search-panel"):
for f in @["filter", "exclude"]:
span(class="search-title"): text capitalize(f)

View file

@ -106,14 +106,10 @@ proc renderVideo*(video: Video; prefs: Prefs; path: string): VNode =
else: vidUrl
case playbackType
of mp4:
-if prefs.muteVideos:
-video(poster=thumb, controls="", muted=""):
-source(src=source, `type`="video/mp4")
-else:
-video(poster=thumb, controls=""):
+video(poster=thumb, controls="", muted=prefs.muteVideos):
source(src=source, `type`="video/mp4")
of m3u8, vmap:
-video(poster=thumb, data-url=source, data-autoload="false")
+video(poster=thumb, data-url=source, data-autoload="false", muted=prefs.muteVideos)
verbatim "<div class=\"video-overlay\" onclick=\"playVideo(this)\">"
tdiv(class="overlay-circle"): span(class="overlay-triangle")
verbatim "</div>"
@ -127,14 +123,9 @@ proc renderGif(gif: Gif; prefs: Prefs): VNode =
buildHtml(tdiv(class="attachments media-gif")): buildHtml(tdiv(class="attachments media-gif")):
tdiv(class="gallery-gif", style={maxHeight: "unset"}): tdiv(class="gallery-gif", style={maxHeight: "unset"}):
tdiv(class="attachment"): tdiv(class="attachment"):
let thumb = getSmallPic(gif.thumb) video(class="gif", poster=getSmallPic(gif.thumb), autoplay=prefs.autoplayGifs,
let url = getPicUrl(gif.url) controls="", muted="", loop=""):
if prefs.autoplayGifs: source(src=getPicUrl(gif.url), `type`="video/mp4")
video(class="gif", poster=thumb, controls="", autoplay="", muted="", loop=""):
source(src=url, `type`="video/mp4")
else:
video(class="gif", poster=thumb, controls="", muted="", loop=""):
source(src=url, `type`="video/mp4")
proc renderPoll(poll: Poll): VNode = proc renderPoll(poll: Poll): VNode =
buildHtml(tdiv(class="poll")): buildHtml(tdiv(class="poll")):
@ -328,7 +319,7 @@ proc renderTweet*(tweet: Tweet; prefs: Prefs; path: string; class=""; index=0;
if tweet.attribution.isSome:
renderAttribution(tweet.attribution.get(), prefs)
-if tweet.card.isSome:
+if tweet.card.isSome and tweet.card.get().kind != hidden:
renderCard(tweet.card.get(), prefs, path)
if tweet.photos.len > 0:

View file

@ -3,11 +3,6 @@ from parameterized import parameterized
card = [
-['Thom_Wolf/status/1122466524860702729',
-'facebookresearch/fairseq',
-'Facebook AI Research Sequence-to-Sequence Toolkit written in Python. - GitHub - facebookresearch/fairseq: Facebook AI Research Sequence-to-Sequence Toolkit written in Python.',
-'github.com', True],
['nim_lang/status/1136652293510717440',
'Version 0.20.0 released',
'We are very proud to announce Nim version 0.20. This is a massive release, both literally and figuratively. It contains more than 1,000 commits and it marks our release candidate for version 1.0!',
@ -25,6 +20,11 @@ card = [
]
no_thumb = [
+['Thom_Wolf/status/1122466524860702729',
+'facebookresearch/fairseq',
+'Facebook AI Research Sequence-to-Sequence Toolkit written in Python. - GitHub - facebookresearch/fairseq: Facebook AI Research Sequence-to-Sequence Toolkit written in Python.',
+'github.com'],
['brent_p/status/1088857328680488961',
'Hts Nim Sugar',
'hts-nim is a library that allows one to use htslib via the nim programming language. Nim is a garbage-collected language that compiles to C and often has similar performance. I have become very...',

View file

@ -17,11 +17,6 @@ protected = [
invalid = [['thisprofiledoesntexist'], ['%']]
-banner_color = [
-['nim_lang', '22, 25, 32'],
-['rustlang', '35, 31, 32']
-]
banner_image = [
['mobile_test', 'profile_banners%2F82135242%2F1384108037%2F1500x500']
]
@ -74,12 +69,6 @@ class ProfileTest(BaseTestCase):
self.open_nitter('user')
self.assert_text('User "user" has been suspended')
-@parameterized.expand(banner_color)
-def test_banner_color(self, username, color):
-self.open_nitter(username)
-banner = self.find_element(Profile.banner + ' a')
-self.assertIn(color, banner.value_of_css_property('background-color'))
@parameterized.expand(banner_image)
def test_banner_image(self, username, url):
self.open_nitter(username)