diff --git a/nitter.example.conf b/nitter.example.conf
index a7abea8..0d4deb7 100644
--- a/nitter.example.conf
+++ b/nitter.example.conf
@@ -1,11 +1,11 @@
[Server]
+hostname = "nitter.net" # for generating links, change this to your own domain/ip
+title = "nitter"
address = "0.0.0.0"
port = 8080
https = false # disable to enable cookies when not using https
httpMaxConnections = 100
staticDir = "./public"
-title = "nitter"
-hostname = "nitter.net"
[Cache]
listMinutes = 240 # how long to cache list info (not the tweets, so keep it high)
@@ -13,9 +13,9 @@ rssMinutes = 10 # how long to cache rss queries
redisHost = "localhost" # Change to "nitter-redis" if using docker-compose
redisPort = 6379
redisPassword = ""
-redisConnections = 20 # connection pool size
+redisConnections = 20 # minimum open connections in pool
redisMaxConnections = 30
-# max, new connections are opened when none are available, but if the pool size
+# new connections are opened when none are available, but if the pool size
# goes above this, they're closed when released. don't worry about this unless
# you receive tons of requests per second
@@ -23,15 +23,15 @@ redisMaxConnections = 30
hmacKey = "secretkey" # random key for cryptographic signing of video urls
base64Media = false # use base64 encoding for proxied media urls
enableRSS = true # set this to false to disable RSS feeds
-enableDebug = false # enable request logs and debug endpoints
+enableDebug = false # enable request logs and debug endpoints (/.tokens)
proxy = "" # http/https url, SOCKS proxies are not supported
proxyAuth = ""
tokenCount = 10
# minimum amount of usable tokens. tokens are used to authorize API requests,
-# but they expire after ~1 hour, and have a limit of 187 requests.
-# the limit gets reset every 15 minutes, and the pool is filled up so there's
-# always at least $tokenCount usable tokens. again, only increase this if
-# you receive major bursts all the time
+# but they expire after ~1 hour, and have a limit of 500 requests per endpoint.
+# the limits reset every 15 minutes, and the pool is filled up so there's
+# always at least `tokenCount` usable tokens. only increase this if you receive
+# major bursts all the time and don't have rate limiting set up via e.g. nginx
# Change default preferences here, see src/prefs_impl.nim for a complete list
[Preferences]
diff --git a/nitter.nimble b/nitter.nimble
index f9aa72a..7771b31 100644
--- a/nitter.nimble
+++ b/nitter.nimble
@@ -12,7 +12,7 @@ bin = @["nitter"]
requires "nim >= 1.4.8"
requires "jester#baca3f"
-requires "karax#9ee695b"
+requires "karax#5cf360c"
requires "sass#7dfdd03"
requires "nimcrypto#4014ef9"
requires "markdown#158efe3"
diff --git a/src/api.nim b/src/api.nim
index dfcf413..b23aa87 100644
--- a/src/api.nim
+++ b/src/api.nim
@@ -7,106 +7,82 @@ import experimental/parser as newParser
proc getGraphUser*(username: string): Future[User] {.async.} =
if username.len == 0: return
let
- variables = """{
- "screen_name": "$1",
- "withSafetyModeUserFields": false,
- "withSuperFollowsUserFields": false
- }""" % [username]
- js = await fetchRaw(graphUser ? {"variables": variables}, Api.userScreenName)
+ variables = %*{"screen_name": username}
+ params = {"variables": $variables, "features": gqlFeatures}
+ js = await fetchRaw(graphUser ? params, Api.userScreenName)
result = parseGraphUser(js)
proc getGraphUserById*(id: string): Future[User] {.async.} =
if id.len == 0 or id.any(c => not c.isDigit): return
let
- variables = """{"userId": "$1", "withSuperFollowsUserFields": true}""" % [id]
- js = await fetchRaw(graphUserById ? {"variables": variables}, Api.userRestId)
+ variables = %*{"userId": id}
+ params = {"variables": $variables, "features": gqlFeatures}
+ js = await fetchRaw(graphUserById ? params, Api.userRestId)
result = parseGraphUser(js)
+proc getGraphUserTweets*(id: string; kind: TimelineKind; after=""): Future[Timeline] {.async.} =
+ if id.len == 0: return
+ let
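+    # when paginating, a "cursor" field (with trailing comma) is spliced into the variables template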
+ cursor = if after.len > 0: "\"cursor\":\"$1\"," % after else: ""
+ variables = userTweetsVariables % [id, cursor]
+ params = {"variables": variables, "features": gqlFeatures}
+ (url, apiId) = case kind
+ of TimelineKind.tweets: (graphUserTweets, Api.userTweets)
+ of TimelineKind.replies: (graphUserTweetsAndReplies, Api.userTweetsAndReplies)
+ of TimelineKind.media: (graphUserMedia, Api.userMedia)
+ js = await fetch(url ? params, apiId)
+ result = parseGraphTimeline(js, "user", after)
+
+proc getGraphListTweets*(id: string; after=""): Future[Timeline] {.async.} =
+ if id.len == 0: return
+ let
+ cursor = if after.len > 0: "\"cursor\":\"$1\"," % after else: ""
+ variables = listTweetsVariables % [id, cursor]
+ params = {"variables": variables, "features": gqlFeatures}
+ js = await fetch(graphListTweets ? params, Api.listTweets)
+ result = parseGraphTimeline(js, "list", after)
+
proc getGraphListBySlug*(name, list: string): Future[List] {.async.} =
let
- variables = %*{"screenName": name, "listSlug": list, "withHighlightedLabel": false}
- url = graphListBySlug ? {"variables": $variables}
- result = parseGraphList(await fetch(url, Api.listBySlug))
+ variables = %*{"screenName": name, "listSlug": list}
+ params = {"variables": $variables, "features": gqlFeatures}
+ result = parseGraphList(await fetch(graphListBySlug ? params, Api.listBySlug))
proc getGraphList*(id: string): Future[List] {.async.} =
let
- variables = %*{"listId": id, "withHighlightedLabel": false}
- url = graphList ? {"variables": $variables}
- result = parseGraphList(await fetch(url, Api.list))
+ variables = %*{"listId": id}
+ params = {"variables": $variables, "features": gqlFeatures}
+ result = parseGraphList(await fetch(graphListById ? params, Api.list))
proc getGraphListMembers*(list: List; after=""): Future[Result[User]] {.async.} =
if list.id.len == 0: return
var
variables = %*{
"listId": list.id,
- "withSuperFollowsUserFields": false,
"withBirdwatchPivots": false,
"withDownvotePerspective": false,
"withReactionsMetadata": false,
- "withReactionsPerspective": false,
- "withSuperFollowsTweetFields": false
+ "withReactionsPerspective": false
}
if after.len > 0:
variables["cursor"] = % after
- let url = graphListMembers ? {"variables": $variables}
+ let url = graphListMembers ? {"variables": $variables, "features": gqlFeatures}
result = parseGraphListMembers(await fetchRaw(url, Api.listMembers), after)
-proc getListTimeline*(id: string; after=""): Future[Timeline] {.async.} =
+proc getGraphTweetResult*(id: string): Future[Tweet] {.async.} =
if id.len == 0: return
let
- ps = genParams({"list_id": id, "ranking_mode": "reverse_chronological"}, after)
- url = listTimeline ? ps
- result = parseTimeline(await fetch(url, Api.timeline), after)
-
-proc getTimeline*(id: string; after=""; replies=false): Future[Timeline] {.async.} =
- if id.len == 0: return
- let
- ps = genParams({"userId": id, "include_tweet_replies": $replies}, after)
- url = timeline / (id & ".json") ? ps
- result = parseTimeline(await fetch(url, Api.timeline), after)
-
-proc getMediaTimeline*(id: string; after=""): Future[Timeline] {.async.} =
- if id.len == 0: return
- let url = mediaTimeline / (id & ".json") ? genParams(cursor=after)
- result = parseTimeline(await fetch(url, Api.timeline), after)
-
-proc getPhotoRail*(name: string): Future[PhotoRail] {.async.} =
- if name.len == 0: return
- let
- ps = genParams({"screen_name": name, "trim_user": "true"},
- count="18", ext=false)
- url = photoRail ? ps
- result = parsePhotoRail(await fetch(url, Api.timeline))
-
-proc getSearch*[T](query: Query; after=""): Future[Result[T]] {.async.} =
- when T is User:
- const
- searchMode = ("result_filter", "user")
- parse = parseUsers
- fetchFunc = fetchRaw
- else:
- const
- searchMode = ("tweet_search_mode", "live")
- parse = parseTimeline
- fetchFunc = fetch
-
- let q = genQueryParam(query)
- if q.len == 0 or q == emptyQuery:
- return Result[T](beginning: true, query: query)
-
- let url = search ? genParams(searchParams & @[("q", q), searchMode], after)
- try:
- result = parse(await fetchFunc(url, Api.search), after)
- result.query = query
- except InternalError:
- return Result[T](beginning: true, query: query)
+ variables = tweetResultVariables % id
+ params = {"variables": variables, "features": gqlFeatures}
+ js = await fetch(graphTweetResult ? params, Api.tweetResult)
+ result = parseGraphTweetResult(js)
proc getGraphTweet(id: string; after=""): Future[Conversation] {.async.} =
if id.len == 0: return
let
cursor = if after.len > 0: "\"cursor\":\"$1\"," % after else: ""
variables = tweetVariables % [id, cursor]
- params = {"variables": variables, "features": tweetFeatures}
+ params = {"variables": variables, "features": gqlFeatures}
js = await fetch(graphTweet ? params, Api.tweetDetail)
result = parseGraphConversation(js, id)
@@ -119,9 +95,51 @@ proc getTweet*(id: string; after=""): Future[Conversation] {.async.} =
if after.len > 0:
result.replies = await getReplies(id, after)
-proc getStatus*(id: string): Future[Tweet] {.async.} =
- let url = status / (id & ".json") ? genParams()
- result = parseStatus(await fetch(url, Api.status))
+proc getGraphSearch*(query: Query; after=""): Future[Result[Tweet]] {.async.} =
+ let q = genQueryParam(query)
+ if q.len == 0 or q == emptyQuery:
+ return Result[Tweet](query: query, beginning: true)
+
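+  # SearchTimeline replaces the old adaptive search endpoint; "Latest" requests chronological results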
+ var
+ variables = %*{
+ "rawQuery": q,
+ "count": 20,
+ "product": "Latest",
+ "withDownvotePerspective": false,
+ "withReactionsMetadata": false,
+ "withReactionsPerspective": false
+ }
+ if after.len > 0:
+ variables["cursor"] = % after
+ let url = graphSearchTimeline ? {"variables": $variables, "features": gqlFeatures}
+ result = parseGraphSearch(await fetch(url, Api.search), after)
+ result.query = query
+
+proc getUserSearch*(query: Query; page="1"): Future[Result[User]] {.async.} =
+ if query.text.len == 0:
+ return Result[User](query: query, beginning: true)
+
+ var url = userSearch ? {
+ "q": query.text,
+ "skip_status": "1",
+ "count": "20",
+ "page": page
+ }
+
+ result = parseUsers(await fetchRaw(url, Api.userSearch))
+ result.query = query
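+  # this endpoint paginates by page number rather than cursor, so "bottom" is simply the next page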
+ if page.len == 0:
+ result.bottom = "2"
+ elif page.allCharsInSet(Digits):
+ result.bottom = $(parseInt(page) + 1)
+
+proc getPhotoRail*(name: string): Future[PhotoRail] {.async.} =
+ if name.len == 0: return
+ let
+ ps = genParams({"screen_name": name, "trim_user": "true"},
+ count="18", ext=false)
+ url = photoRail ? ps
+ result = parsePhotoRail(await fetch(url, Api.timeline))
proc resolve*(url: string; prefs: Prefs): Future[string] {.async.} =
let client = newAsyncHttpClient(maxRedirects=0)
diff --git a/src/apiutils.nim b/src/apiutils.nim
index 96ace0c..dbc6cca 100644
--- a/src/apiutils.nim
+++ b/src/apiutils.nim
@@ -17,8 +17,8 @@ proc genParams*(pars: openArray[(string, string)] = @[]; cursor="";
result &= p
if ext:
result &= ("ext", "mediaStats")
- result &= ("include_ext_alt_text", "true")
- result &= ("include_ext_media_availability", "true")
+ result &= ("include_ext_alt_text", "1")
+ result &= ("include_ext_media_availability", "1")
if count.len > 0:
result &= ("count", count)
if cursor.len > 0:
@@ -44,7 +44,7 @@ proc genHeaders*(token: Token = nil): HttpHeaders =
})
template updateToken() =
- if api != Api.search and resp.headers.hasKey(rlRemaining):
+ if resp.headers.hasKey(rlRemaining):
let
remaining = parseInt(resp.headers[rlRemaining])
reset = parseInt(resp.headers[rlReset])
@@ -67,14 +67,9 @@ template fetchImpl(result, fetchBody) {.dirty.} =
getContent()
- # Twitter randomly returns 401 errors with an empty body quite often.
- # Retrying the request usually works.
- if resp.status == "401 Unauthorized" and result.len == 0:
- getContent()
-
- if resp.status == $Http503:
- badClient = true
- raise newException(InternalError, result)
+ if resp.status == $Http503:
+ badClient = true
+ raise newException(BadClientError, "Bad client")
if result.len > 0:
if resp.headers.getOrDefault("content-encoding") == "gzip":
@@ -90,6 +85,9 @@ template fetchImpl(result, fetchBody) {.dirty.} =
raise newException(InternalError, $url)
except InternalError as e:
raise e
+ except BadClientError as e:
+ release(token, used=true)
+ raise e
except Exception as e:
echo "error: ", e.name, ", msg: ", e.msg, ", token: ", token[], ", url: ", url
if "length" notin e.msg and "descriptor" notin e.msg:
diff --git a/src/consts.nim b/src/consts.nim
index bb4e1a3..27e82f9 100644
--- a/src/consts.nim
+++ b/src/consts.nim
@@ -1,30 +1,28 @@
# SPDX-License-Identifier: AGPL-3.0-only
-import uri, sequtils
+import uri, sequtils, strutils
const
- auth* = "Bearer AAAAAAAAAAAAAAAAAAAAAPYXBAAAAAAACLXUNDekMxqa8h%2F40K4moUkGsoc%3DTYfbDKbT3jJPCEVnMYqilB28NHfOPqkca3qaAxGfsyKCs0wRbw"
+ auth* = "Bearer AAAAAAAAAAAAAAAAAAAAANRILgAAAAAAnNwIzUejRCOuH5E6I8xnZz4puTs%3D1Zv7ttfk8LF81IUq16cHjhLTvJu4FA33AGWWjCpTnA"
api = parseUri("https://api.twitter.com")
activate* = $(api / "1.1/guest/activate.json")
- userShow* = api / "1.1/users/show.json"
photoRail* = api / "1.1/statuses/media_timeline.json"
- status* = api / "1.1/statuses/show"
- search* = api / "2/search/adaptive.json"
-
- timelineApi = api / "2/timeline"
- timeline* = timelineApi / "profile"
- mediaTimeline* = timelineApi / "media"
- listTimeline* = timelineApi / "list.json"
- tweet* = timelineApi / "conversation"
+ userSearch* = api / "1.1/users/search.json"
graphql = api / "graphql"
- graphTweet* = graphql / "6lWNh96EXDJCXl05SAtn_g/TweetDetail"
- graphUser* = graphql / "7mjxD3-C6BxitPMVQ6w0-Q/UserByScreenName"
- graphUserById* = graphql / "I5nvpI91ljifos1Y3Lltyg/UserByRestId"
- graphList* = graphql / "JADTh6cjebfgetzvF3tQvQ/List"
- graphListBySlug* = graphql / "ErWsz9cObLel1BF-HjuBlA/ListBySlug"
- graphListMembers* = graphql / "Ke6urWMeCV2UlKXGRy4sow/ListMembers"
+ graphUser* = graphql / "8mPfHBetXOg-EHAyeVxUoA/UserByScreenName"
+ graphUserById* = graphql / "nI8WydSd-X-lQIVo6bdktQ/UserByRestId"
+ graphUserTweets* = graphql / "9rys0A7w1EyqVd2ME0QCJg/UserTweets"
+ graphUserTweetsAndReplies* = graphql / "ehMCHF3Mkgjsfz_aImqOsg/UserTweetsAndReplies"
+ graphUserMedia* = graphql / "MA_EP2a21zpzNWKRkaPBMg/UserMedia"
+ graphTweet* = graphql / "6I7Hm635Q6ftv69L8VrSeQ/TweetDetail"
+ graphTweetResult* = graphql / "rt-rHeSJ-2H9O9gxWQcPcg/TweetResultByRestId"
+ graphSearchTimeline* = graphql / "gkjsKepM6gl_HmFWoWKfgg/SearchTimeline"
+ graphListById* = graphql / "iTpgCtbdxrsJfyx0cFjHqg/ListByRestId"
+ graphListBySlug* = graphql / "-kmqNvm5Y-cVrfvBy6docg/ListBySlug"
+ graphListMembers* = graphql / "P4NpVZDqUD_7MEM84L-8nw/ListMembers"
+ graphListTweets* = graphql / "jZntL0oVJSdjhmPcdbw_eA/ListLatestTweetsTimeline"
timelineParams* = {
"include_profile_interstitial_type": "0",
@@ -35,58 +33,85 @@ const
"include_mute_edge": "0",
"include_can_dm": "0",
"include_can_media_tag": "1",
+ "include_ext_is_blue_verified": "1",
"skip_status": "1",
"cards_platform": "Web-12",
"include_cards": "1",
- "include_composer_source": "false",
+ "include_composer_source": "0",
"include_reply_count": "1",
"tweet_mode": "extended",
- "include_entities": "true",
- "include_user_entities": "true",
- "include_ext_media_color": "false",
- "send_error_codes": "true",
- "simple_quoted_tweet": "true",
- "include_quote_count": "true"
+ "include_entities": "1",
+ "include_user_entities": "1",
+ "include_ext_media_color": "0",
+ "send_error_codes": "1",
+ "simple_quoted_tweet": "1",
+ "include_quote_count": "1"
}.toSeq
- searchParams* = {
- "query_source": "typed_query",
- "pc": "1",
- "spelling_corrections": "1"
- }.toSeq
- ## top: nothing
- ## latest: "tweet_search_mode: live"
- ## user: "result_filter: user"
- ## photos: "result_filter: photos"
- ## videos: "result_filter: videos"
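+  # GraphQL feature switches sent with every request via the "features" query parameter, minified to a single line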
+ gqlFeatures* = """{
+ "blue_business_profile_image_shape_enabled": false,
+ "freedom_of_speech_not_reach_fetch_enabled": false,
+ "graphql_is_translatable_rweb_tweet_is_translatable_enabled": false,
+ "interactive_text_enabled": false,
+ "longform_notetweets_consumption_enabled": true,
+ "longform_notetweets_richtext_consumption_enabled": true,
+ "longform_notetweets_rich_text_read_enabled": false,
+ "responsive_web_edit_tweet_api_enabled": false,
+ "responsive_web_enhance_cards_enabled": false,
+ "responsive_web_graphql_exclude_directive_enabled": true,
+ "responsive_web_graphql_skip_user_profile_image_extensions_enabled": false,
+ "responsive_web_graphql_timeline_navigation_enabled": false,
+ "responsive_web_text_conversations_enabled": false,
+ "responsive_web_twitter_blue_verified_badge_is_enabled": true,
+ "spaces_2022_h2_clipping": true,
+ "spaces_2022_h2_spaces_communities": true,
+ "standardized_nudges_misinfo": false,
+ "tweet_awards_web_tipping_enabled": false,
+ "tweet_with_visibility_results_prefer_gql_limited_actions_policy_enabled": false,
+ "tweetypie_unmention_optimization_enabled": false,
+ "verified_phone_label_enabled": false,
+ "vibe_api_enabled": false,
+ "view_counts_everywhere_api_enabled": false
+}""".replace(" ", "").replace("\n", "")
tweetVariables* = """{
"focalTweetId": "$1",
$2
- "includePromotedContent": false,
"withBirdwatchNotes": false,
+ "includePromotedContent": false,
+ "withDownvotePerspective": false,
+ "withReactionsMetadata": false,
+ "withReactionsPerspective": false,
+ "withVoice": false
+}"""
+
+ tweetResultVariables* = """{
+ "tweetId": "$1",
+ "includePromotedContent": false,
+ "withDownvotePerspective": false,
+ "withReactionsMetadata": false,
+ "withReactionsPerspective": false,
+ "withVoice": false,
+ "withCommunity": false
+}"""
+
+ userTweetsVariables* = """{
+ "userId": "$1", $2
+ "count": 20,
+ "includePromotedContent": false,
"withDownvotePerspective": false,
"withReactionsMetadata": false,
"withReactionsPerspective": false,
- "withSuperFollowsTweetFields": false,
- "withSuperFollowsUserFields": false,
"withVoice": false,
"withV2Timeline": true
}"""
- tweetFeatures* = """{
- "graphql_is_translatable_rweb_tweet_is_translatable_enabled": false,
- "responsive_web_graphql_timeline_navigation_enabled": false,
- "standardized_nudges_misinfo": false,
- "verified_phone_label_enabled": false,
- "responsive_web_twitter_blue_verified_badge_is_enabled": false,
- "tweet_with_visibility_results_prefer_gql_limited_actions_policy_enabled": false,
- "view_counts_everywhere_api_enabled": false,
- "responsive_web_edit_tweet_api_enabled": false,
- "tweetypie_unmention_optimization_enabled": false,
- "vibe_api_enabled": false,
- "longform_notetweets_consumption_enabled": true,
- "responsive_web_text_conversations_enabled": false,
- "responsive_web_enhance_cards_enabled": false,
- "interactive_text_enabled": false
+ listTweetsVariables* = """{
+ "listId": "$1", $2
+ "count": 20,
+ "includePromotedContent": false,
+ "withDownvotePerspective": false,
+ "withReactionsMetadata": false,
+ "withReactionsPerspective": false,
+ "withVoice": false
}"""
diff --git a/src/experimental/parser.nim b/src/experimental/parser.nim
index 98ce7df..40986f5 100644
--- a/src/experimental/parser.nim
+++ b/src/experimental/parser.nim
@@ -1,2 +1,2 @@
-import parser/[user, graphql, timeline]
-export user, graphql, timeline
+import parser/[user, graphql]
+export user, graphql
diff --git a/src/experimental/parser/graphql.nim b/src/experimental/parser/graphql.nim
index 4431db3..36014e3 100644
--- a/src/experimental/parser/graphql.nim
+++ b/src/experimental/parser/graphql.nim
@@ -11,6 +11,7 @@ proc parseGraphUser*(json: string): User =
result = toUser raw.data.user.result.legacy
result.id = raw.data.user.result.restId
+ result.verified = result.verified or raw.data.user.result.isBlueVerified
proc parseGraphListMembers*(json, cursor: string): Result[User] =
result = Result[User](
diff --git a/src/experimental/parser/timeline.nim b/src/experimental/parser/timeline.nim
deleted file mode 100644
index 4663d00..0000000
--- a/src/experimental/parser/timeline.nim
+++ /dev/null
@@ -1,30 +0,0 @@
-import std/[strutils, tables]
-import jsony
-import user, ../types/timeline
-from ../../types import Result, User
-
-proc getId(id: string): string {.inline.} =
- let start = id.rfind("-")
- if start < 0: return id
- id[start + 1 ..< id.len]
-
-proc parseUsers*(json: string; after=""): Result[User] =
- result = Result[User](beginning: after.len == 0)
-
- let raw = json.fromJson(Search)
- if raw.timeline.instructions.len == 0:
- return
-
- for i in raw.timeline.instructions:
- if i.addEntries.entries.len > 0:
- for e in i.addEntries.entries:
- let id = e.entryId.getId
- if e.entryId.startsWith("user"):
- if id in raw.globalObjects.users:
- result.content.add toUser raw.globalObjects.users[id]
- elif e.entryId.startsWith("cursor"):
- let cursor = e.content.operation.cursor
- if cursor.cursorType == "Top":
- result.top = cursor.value
- elif cursor.cursorType == "Bottom":
- result.bottom = cursor.value
diff --git a/src/experimental/parser/unifiedcard.nim b/src/experimental/parser/unifiedcard.nim
index 3c5158a..c9af437 100644
--- a/src/experimental/parser/unifiedcard.nim
+++ b/src/experimental/parser/unifiedcard.nim
@@ -84,6 +84,8 @@ proc parseUnifiedCard*(json: string): Card =
component.parseMedia(card, result)
of buttonGroup:
discard
+ of ComponentType.hidden:
+ result.kind = CardKind.hidden
of ComponentType.unknown:
echo "ERROR: Unknown component type: ", json
diff --git a/src/experimental/parser/user.nim b/src/experimental/parser/user.nim
index 715c9a9..b4d710f 100644
--- a/src/experimental/parser/user.nim
+++ b/src/experimental/parser/user.nim
@@ -2,7 +2,7 @@ import std/[algorithm, unicode, re, strutils, strformat, options, nre]
import jsony
import utils, slices
import ../types/user as userType
-from ../../types import User, Error
+from ../../types import Result, User, Error
let
unRegex = re.re"(^|[^A-z0-9-_./?])@([A-z0-9_]{1,15})"
@@ -76,3 +76,12 @@ proc parseUser*(json: string; username=""): User =
else: echo "[error - parseUser]: ", error
result = toUser json.fromJson(RawUser)
+
+proc parseUsers*(json: string; after=""): Result[User] =
+ result = Result[User](beginning: after.len == 0)
+
+  # a response starting with '{' is an error object; valid results are a JSON array
+ if json[0] == '[':
+ let raw = json.fromJson(seq[RawUser])
+ for user in raw:
+ result.content.add user.toUser
diff --git a/src/experimental/types/graphuser.nim b/src/experimental/types/graphuser.nim
index e13383a..478e7f3 100644
--- a/src/experimental/types/graphuser.nim
+++ b/src/experimental/types/graphuser.nim
@@ -11,4 +11,5 @@ type
UserResult = object
legacy*: RawUser
restId*: string
+ isBlueVerified*: bool
reason*: Option[string]
diff --git a/src/experimental/types/unifiedcard.nim b/src/experimental/types/unifiedcard.nim
index 4ec587c..6e83cad 100644
--- a/src/experimental/types/unifiedcard.nim
+++ b/src/experimental/types/unifiedcard.nim
@@ -17,6 +17,7 @@ type
twitterListDetails
communityDetails
mediaWithDetailsHorizontal
+ hidden
unknown
Component* = object
@@ -71,11 +72,11 @@ type
Text = object
content: string
- HasTypeField = Component | Destination | MediaEntity | AppStoreData
+ TypeField = Component | Destination | MediaEntity | AppStoreData
converter fromText*(text: Text): string = text.content
-proc renameHook*(v: var HasTypeField; fieldName: var string) =
+proc renameHook*(v: var TypeField; fieldName: var string) =
if fieldName == "type":
fieldName = "kind"
@@ -89,6 +90,7 @@ proc enumHook*(s: string; v: var ComponentType) =
of "twitter_list_details": twitterListDetails
of "community_details": communityDetails
of "media_with_details_horizontal": mediaWithDetailsHorizontal
+ of "commerce_drop_details": hidden
else: echo "ERROR: Unknown enum value (ComponentType): ", s; unknown
proc enumHook*(s: string; v: var AppType) =
diff --git a/src/http_pool.nim b/src/http_pool.nim
index 2037520..b4e3cee 100644
--- a/src/http_pool.nim
+++ b/src/http_pool.nim
@@ -42,5 +42,11 @@ template use*(pool: HttpPool; heads: HttpHeaders; body: untyped): untyped =
except ProtocolError:
# Twitter closed the connection, retry
body
+ except BadClientError:
+ # Twitter returned 503, we need a new client
+ pool.release(c, true)
+ badClient = false
+ c = pool.acquire(heads)
+ body
finally:
pool.release(c, badClient)
diff --git a/src/nitter.nim b/src/nitter.nim
index 2e868a4..627af75 100644
--- a/src/nitter.nim
+++ b/src/nitter.nim
@@ -85,19 +85,23 @@ routes:
resp Http500, showError(
&"An error occurred, please {link} with the URL you tried to visit.", cfg)
+ error BadClientError:
+ echo error.exc.name, ": ", error.exc.msg
+    resp Http500, showError("Network error occurred, please try again.", cfg)
+
error RateLimitError:
const link = a("another instance", href = instancesUrl)
resp Http429, showError(
&"Instance has been rate limited.
Use {link} or try again later.", cfg)
- extend unsupported, ""
- extend preferences, ""
- extend resolver, ""
extend rss, ""
+ extend status, ""
extend search, ""
extend timeline, ""
- extend list, ""
- extend status, ""
extend media, ""
+ extend list, ""
+ extend preferences, ""
+ extend resolver, ""
extend embed, ""
extend debug, ""
+ extend unsupported, ""
diff --git a/src/parser.nim b/src/parser.nim
index fa877f9..fe2fe5b 100644
--- a/src/parser.nim
+++ b/src/parser.nim
@@ -4,6 +4,8 @@ import packedjson, packedjson/deserialiser
import types, parserutils, utils
import experimental/parser/unifiedcard
+proc parseGraphTweet(js: JsonNode): Tweet
+
proc parseUser(js: JsonNode; id=""): User =
if js.isNull: return
result = User(
@@ -19,13 +21,20 @@ proc parseUser(js: JsonNode; id=""): User =
tweets: js{"statuses_count"}.getInt,
likes: js{"favourites_count"}.getInt,
media: js{"media_count"}.getInt,
- verified: js{"verified"}.getBool,
+ verified: js{"verified"}.getBool or js{"ext_is_blue_verified"}.getBool,
protected: js{"protected"}.getBool,
joinDate: js{"created_at"}.getTime
)
result.expandUserEntities(js)
+proc parseGraphUser(js: JsonNode): User =
+ let user = ? js{"user_results", "result"}
+ result = parseUser(user{"legacy"})
+
+ if "is_blue_verified" in user:
+ result.verified = true
+
proc parseGraphList*(js: JsonNode): List =
if js.isNull: return
@@ -38,11 +47,11 @@ proc parseGraphList*(js: JsonNode): List =
result = List(
id: list{"id_str"}.getStr,
name: list{"name"}.getStr,
- username: list{"user", "legacy", "screen_name"}.getStr,
- userId: list{"user", "rest_id"}.getStr,
+ username: list{"user_results", "result", "legacy", "screen_name"}.getStr,
+ userId: list{"user_results", "result", "rest_id"}.getStr,
description: list{"description"}.getStr,
members: list{"member_count"}.getInt,
- banner: list{"custom_banner_media", "media_info", "url"}.getImageStr
+ banner: list{"custom_banner_media", "media_info", "original_img_url"}.getImageStr
)
proc parsePoll(js: JsonNode): Poll =
@@ -213,10 +222,18 @@ proc parseTweet(js: JsonNode; jsCard: JsonNode = newJNull()): Tweet =
if js{"is_quote_status"}.getBool:
result.quote = some Tweet(id: js{"quoted_status_id_str"}.getId)
+ # legacy
with rt, js{"retweeted_status_id_str"}:
result.retweet = some Tweet(id: rt.getId)
return
+ # graphql
+ with rt, js{"retweeted_status_result", "result"}:
+    # needed due to a weird edge case where the actual tweet data isn't included
+ if "legacy" in rt:
+ result.retweet = some parseGraphTweet(rt)
+ return
+
if jsCard.kind != JNull:
let name = jsCard{"name"}.getStr
if "poll" in name:
@@ -237,7 +254,10 @@ proc parseTweet(js: JsonNode; jsCard: JsonNode = newJNull()): Tweet =
of "video":
result.video = some(parseVideo(m))
with user, m{"additional_media_info", "source_user"}:
- result.attribution = some(parseUser(user))
+ if user{"id"}.getInt > 0:
+ result.attribution = some(parseUser(user))
+ else:
+ result.attribution = some(parseGraphUser(user))
of "animated_gif":
result.gif = some(parseGif(m))
else: discard
@@ -299,19 +319,6 @@ proc parseGlobalObjects(js: JsonNode): GlobalObjects =
tweet.user = result.users[tweet.user.id]
result.tweets[k] = tweet
-proc parseStatus*(js: JsonNode): Tweet =
- with e, js{"errors"}:
- if e.getError in {tweetNotFound, tweetUnavailable, tweetCensored, doesntExist,
- tweetNotAuthorized, suspended}:
- return
-
- result = parseTweet(js, js{"card"})
- if not result.isNil:
- result.user = parseUser(js{"user"})
-
- with quote, js{"quoted_status"}:
- result.quote = some parseStatus(js{"quoted_status"})
-
proc parseInstructions[T](res: var Result[T]; global: GlobalObjects; js: JsonNode) =
if js.kind != JArray or js.len == 0:
return
@@ -352,7 +359,7 @@ proc parseTimeline*(js: JsonNode; after=""): Timeline =
result.top = e.getCursor
elif "cursor-bottom" in entry:
result.bottom = e.getCursor
- elif entry.startsWith("sq-C"):
+ elif entry.startsWith("sq-cursor"):
with cursor, e{"content", "operation", "cursor"}:
if cursor{"cursorType"}.getStr == "Bottom":
result.bottom = cursor{"value"}.getStr
@@ -373,9 +380,20 @@ proc parsePhotoRail*(js: JsonNode): PhotoRail =
result.add GalleryPhoto(url: url, tweetId: $t.id)
proc parseGraphTweet(js: JsonNode): Tweet =
- if js.kind == JNull or js{"__typename"}.getStr == "TweetUnavailable":
+ if js.kind == JNull:
return Tweet(available: false)
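+  # GraphQL wraps tombstoned and visibility-limited tweets; unwrap them or mark them unavailable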
+ case js{"__typename"}.getStr
+ of "TweetUnavailable":
+ return Tweet(available: false)
+ of "TweetTombstone":
+ return Tweet(
+ available: false,
+ text: js{"tombstone", "text"}.getTombstone
+ )
+ of "TweetWithVisibilityResults":
+ return parseGraphTweet(js{"tweet"})
+
var jsCard = copy(js{"card", "legacy"})
if jsCard.kind != JNull:
var values = newJObject()
@@ -384,7 +402,7 @@ proc parseGraphTweet(js: JsonNode): Tweet =
jsCard["binding_values"] = values
result = parseTweet(js{"legacy"}, jsCard)
- result.user = parseUser(js{"core", "user_results", "result", "legacy"})
+ result.user = parseGraphUser(js{"core"})
with noteTweet, js{"note_tweet", "note_tweet_results", "result"}:
result.expandNoteTweetEntities(noteTweet)
@@ -407,10 +425,14 @@ proc parseGraphThread(js: JsonNode): tuple[thread: Chain; self: bool] =
if t{"item", "itemContent", "tweetDisplayType"}.getStr == "SelfThread":
result.self = true
+proc parseGraphTweetResult*(js: JsonNode): Tweet =
+ with tweet, js{"data", "tweetResult", "result"}:
+ result = parseGraphTweet(tweet)
+
proc parseGraphConversation*(js: JsonNode; tweetId: string): Conversation =
result = Conversation(replies: Result[Chain](beginning: true))
- let instructions = ? js{"data", "threaded_conversation_with_injections_v2", "instructions"}
+ let instructions = ? js{"data", "threaded_conversation_with_injections", "instructions"}
if instructions.len == 0:
return
@@ -418,12 +440,25 @@ proc parseGraphConversation*(js: JsonNode; tweetId: string): Conversation =
let entryId = e{"entryId"}.getStr
# echo entryId
if entryId.startsWith("tweet"):
- let tweet = parseGraphTweet(e{"content", "itemContent", "tweet_results", "result"})
+ with tweetResult, e{"content", "itemContent", "tweet_results", "result"}:
+ let tweet = parseGraphTweet(tweetResult)
- if not tweet.available:
- tweet.id = parseBiggestInt(entryId.getId())
+ if not tweet.available:
+ tweet.id = parseBiggestInt(entryId.getId())
- if $tweet.id == tweetId:
+ if $tweet.id == tweetId:
+ result.tweet = tweet
+ else:
+ result.before.content.add tweet
+ elif entryId.startsWith("tombstone"):
+ let id = entryId.getId()
+ let tweet = Tweet(
+ id: parseBiggestInt(id),
+ available: false,
+ text: e{"content", "itemContent", "tombstoneInfo", "richText"}.getTombstone
+ )
+
+ if id == tweetId:
result.tweet = tweet
else:
result.before.content.add tweet
@@ -435,3 +470,50 @@ proc parseGraphConversation*(js: JsonNode; tweetId: string): Conversation =
result.replies.content.add thread
elif entryId.startsWith("cursor-bottom"):
result.replies.bottom = e{"content", "itemContent", "value"}.getStr
+
+proc parseGraphTimeline*(js: JsonNode; root: string; after=""): Timeline =
+ result = Timeline(beginning: after.len == 0)
+
+ let instructions =
+ if root == "list": ? js{"data", "list", "tweets_timeline", "timeline", "instructions"}
+ else: ? js{"data", "user", "result", "timeline_v2", "timeline", "instructions"}
+
+ if instructions.len == 0:
+ return
+
+ for i in instructions:
+ if i{"type"}.getStr == "TimelineAddEntries":
+ for e in i{"entries"}:
+ let entryId = e{"entryId"}.getStr
+ if entryId.startsWith("tweet"):
+ with tweetResult, e{"content", "itemContent", "tweet_results", "result"}:
+ let tweet = parseGraphTweet(tweetResult)
+ if not tweet.available:
+ tweet.id = parseBiggestInt(entryId.getId())
+ result.content.add tweet
+ elif entryId.startsWith("cursor-bottom"):
+ result.bottom = e{"content", "value"}.getStr
+
+proc parseGraphSearch*(js: JsonNode; after=""): Timeline =
+ result = Timeline(beginning: after.len == 0)
+
+ let instructions = js{"data", "search_by_raw_query", "search_timeline", "timeline", "instructions"}
+ if instructions.len == 0:
+ return
+
+ for instruction in instructions:
+ let typ = instruction{"type"}.getStr
+ if typ == "TimelineAddEntries":
+ for e in instructions[0]{"entries"}:
+ let entryId = e{"entryId"}.getStr
+ if entryId.startsWith("tweet"):
+ with tweetResult, e{"content", "itemContent", "tweet_results", "result"}:
+ let tweet = parseGraphTweet(tweetResult)
+ if not tweet.available:
+ tweet.id = parseBiggestInt(entryId.getId())
+ result.content.add tweet
+ elif entryId.startsWith("cursor-bottom"):
+ result.bottom = e{"content", "value"}.getStr
+ elif typ == "TimelineReplaceEntry":
+ if instruction{"entry_id_to_replace"}.getStr.startsWith("cursor-bottom"):
+ result.bottom = instruction{"entry", "content", "value"}.getStr
diff --git a/src/parserutils.nim b/src/parserutils.nim
index 8ae9cd0..f28bd52 100644
--- a/src/parserutils.nim
+++ b/src/parserutils.nim
@@ -130,7 +130,7 @@ proc getBanner*(js: JsonNode): string =
return
proc getTombstone*(js: JsonNode): string =
- result = js{"tombstoneInfo", "richText", "text"}.getStr
+ result = js{"text"}.getStr
result.removeSuffix(" Learn more")
proc getMp4Resolution*(url: string): int =
diff --git a/src/redis_cache.nim b/src/redis_cache.nim
index 742b7ae..89161be 100644
--- a/src/redis_cache.nim
+++ b/src/redis_cache.nim
@@ -153,7 +153,7 @@ proc getCachedTweet*(id: int64): Future[Tweet] {.async.} =
if tweet != redisNil:
tweet.deserialize(Tweet)
else:
- result = await getStatus($id)
+ result = await getGraphTweetResult($id)
if not result.isNil:
await cache(result)
diff --git a/src/routes/list.nim b/src/routes/list.nim
index c97b1c1..ac3e97e 100644
--- a/src/routes/list.nim
+++ b/src/routes/list.nim
@@ -6,7 +6,6 @@ import jester
import router_utils
import ".."/[types, redis_cache, api]
import ../views/[general, timeline, list]
-export getListTimeline, getGraphList
template respList*(list, timeline, title, vnode: typed) =
if list.id.len == 0 or list.name.len == 0:
@@ -39,7 +38,7 @@ proc createListRouter*(cfg: Config) =
let
prefs = cookiePrefs()
list = await getCachedList(id=(@"id"))
- timeline = await getListTimeline(list.id, getCursor())
+ timeline = await getGraphListTweets(list.id, getCursor())
vnode = renderTimelineTweets(timeline, prefs, request.path)
respList(list, timeline, list.title, vnode)
diff --git a/src/routes/rss.nim b/src/routes/rss.nim
index 5da29b0..1323ed3 100644
--- a/src/routes/rss.nim
+++ b/src/routes/rss.nim
@@ -28,7 +28,7 @@ proc timelineRss*(req: Request; cfg: Config; query: Query): Future[Rss] {.async.
var q = query
q.fromUser = names
profile = Profile(
- tweets: await getSearch[Tweet](q, after),
+ tweets: await getGraphSearch(q, after),
# this is kinda dumb
user: User(
username: name,
@@ -78,7 +78,7 @@ proc createRssRouter*(cfg: Config) =
if rss.cursor.len > 0:
respRss(rss, "Search")
- let tweets = await getSearch[Tweet](query, cursor)
+ let tweets = await getGraphSearch(query, cursor)
rss.cursor = tweets.bottom
rss.feed = renderSearchRss(tweets.content, query.text, genQueryUrl(query), cfg)
@@ -159,7 +159,7 @@ proc createRssRouter*(cfg: Config) =
let
list = await getCachedList(id=id)
- timeline = await getListTimeline(list.id, cursor)
+ timeline = await getGraphListTweets(list.id, cursor)
rss.cursor = timeline.bottom
rss.feed = renderListRss(timeline.content, list, cfg)
diff --git a/src/routes/search.nim b/src/routes/search.nim
index b2fd718..02c14e3 100644
--- a/src/routes/search.nim
+++ b/src/routes/search.nim
@@ -27,11 +27,15 @@ proc createSearchRouter*(cfg: Config) =
of users:
if "," in q:
redirect("/" & q)
- let users = await getSearch[User](query, getCursor())
+ var users: Result[User]
+ try:
+ users = await getUserSearch(query, getCursor())
+ except InternalError:
+ users = Result[User](beginning: true, query: query)
resp renderMain(renderUserSearch(users, prefs), request, cfg, prefs, title)
of tweets:
let
- tweets = await getSearch[Tweet](query, getCursor())
+ tweets = await getGraphSearch(query, getCursor())
rss = "/search/rss?" & genQueryUrl(query)
resp renderMain(renderTweetSearch(tweets, prefs, getPath()),
request, cfg, prefs, title, rss=rss)
diff --git a/src/routes/status.nim b/src/routes/status.nim
index 1104282..7e89220 100644
--- a/src/routes/status.nim
+++ b/src/routes/status.nim
@@ -16,17 +16,21 @@ proc createStatusRouter*(cfg: Config) =
router status:
get "/@name/status/@id/?":
cond '.' notin @"name"
- cond not @"id".any(c => not c.isDigit)
+ let id = @"id"
+
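+      # tweet IDs are 64-bit integers, so a valid ID never exceeds 19 digits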
+ if id.len > 19 or id.any(c => not c.isDigit):
+ resp Http404, showError("Invalid tweet ID", cfg)
+
let prefs = cookiePrefs()
# used for the infinite scroll feature
if @"scroll".len > 0:
- let replies = await getReplies(@"id", getCursor())
+ let replies = await getReplies(id, getCursor())
if replies.content.len == 0:
resp Http404, ""
resp $renderReplies(replies, prefs, getPath())
- let conv = await getTweet(@"id", getCursor())
+ let conv = await getTweet(id, getCursor())
if conv == nil:
echo "nil conv"
diff --git a/src/routes/timeline.nim b/src/routes/timeline.nim
index a0a6e21..331b8ae 100644
--- a/src/routes/timeline.nim
+++ b/src/routes/timeline.nim
@@ -47,10 +47,10 @@ proc fetchProfile*(after: string; query: Query; skipRail=false;
let
timeline =
case query.kind
- of posts: getTimeline(userId, after)
- of replies: getTimeline(userId, after, replies=true)
- of media: getMediaTimeline(userId, after)
- else: getSearch[Tweet](query, after)
+ of posts: getGraphUserTweets(userId, TimelineKind.tweets, after)
+ of replies: getGraphUserTweets(userId, TimelineKind.replies, after)
+ of media: getGraphUserTweets(userId, TimelineKind.media, after)
+ else: getGraphSearch(query, after)
rail =
skipIf(skipRail or query.kind == media, @[]):
@@ -64,6 +64,7 @@ proc fetchProfile*(after: string; query: Query; skipRail=false;
let tweet = await getCachedTweet(user.pinnedTweet)
if not tweet.isNil:
tweet.pinned = true
+ tweet.user = user
pinned = some tweet
result = Profile(
@@ -82,7 +83,7 @@ proc showTimeline*(request: Request; query: Query; cfg: Config; prefs: Prefs;
rss, after: string): Future[string] {.async.} =
if query.fromUser.len != 1:
let
- timeline = await getSearch[Tweet](query, after)
+ timeline = await getGraphSearch(query, after)
html = renderTweetSearch(timeline, prefs, getPath())
return renderMain(html, request, cfg, prefs, "Multi", rss=rss)
@@ -123,7 +124,7 @@ proc createTimelineRouter*(cfg: Config) =
get "/@name/?@tab?/?":
cond '.' notin @"name"
- cond @"name" notin ["pic", "gif", "video"]
+ cond @"name" notin ["pic", "gif", "video", "search", "settings", "login", "intent", "i"]
cond @"tab" in ["with_replies", "media", "search", ""]
let
prefs = cookiePrefs()
@@ -137,7 +138,7 @@ proc createTimelineRouter*(cfg: Config) =
# used for the infinite scroll feature
if @"scroll".len > 0:
if query.fromUser.len != 1:
- var timeline = await getSearch[Tweet](query, after)
+ var timeline = await getGraphSearch(query, after)
if timeline.content.len == 0: resp Http404
timeline.beginning = true
resp $renderTweetSearch(timeline, prefs, getPath())
diff --git a/src/tokens.nim b/src/tokens.nim
index e6a4449..6ef81f5 100644
--- a/src/tokens.nim
+++ b/src/tokens.nim
@@ -1,8 +1,7 @@
# SPDX-License-Identifier: AGPL-3.0-only
import asyncdispatch, httpclient, times, sequtils, json, random
import strutils, tables
-import zippy
-import types, consts, http_pool
+import types, consts
const
maxConcurrentReqs = 5 # max requests at a time per token, to avoid race conditions
@@ -11,11 +10,12 @@ const
failDelay = initDuration(minutes=30)
var
- clientPool: HttpPool
tokenPool: seq[Token]
lastFailed: Time
enableLogging = false
+let headers = newHttpHeaders({"authorization": auth})
+
template log(str) =
if enableLogging: echo "[tokens] ", str
@@ -41,10 +41,12 @@ proc getPoolJson*(): JsonNode =
let
maxReqs =
case api
- of Api.listMembers, Api.listBySlug, Api.list,
- Api.userRestId, Api.userScreenName, Api.tweetDetail: 500
of Api.timeline: 187
- else: 180
+ of Api.listMembers, Api.listBySlug, Api.list, Api.listTweets,
+ Api.userTweets, Api.userTweetsAndReplies, Api.userMedia,
+ Api.userRestId, Api.userScreenName,
+ Api.tweetDetail, Api.tweetResult, Api.search: 500
+ of Api.userSearch: 900
reqs = maxReqs - token.apis[api].remaining
reqsPerApi[$api] = reqsPerApi.getOrDefault($api, 0) + reqs
@@ -65,18 +67,12 @@ proc fetchToken(): Future[Token] {.async.} =
if getTime() - lastFailed < failDelay:
raise rateLimitError()
- let headers = newHttpHeaders({
- "accept": "text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,*/*;q=0.8",
- "accept-encoding": "gzip",
- "accept-language": "en-US,en;q=0.5",
- "connection": "keep-alive",
- "authorization": auth
- })
+ let client = newAsyncHttpClient(headers=headers)
try:
let
- resp = clientPool.use(headers): await c.postContent(activate)
- tokNode = parseJson(uncompress(resp))["guest_token"]
+ resp = await client.postContent(activate)
+ tokNode = parseJson(resp)["guest_token"]
tok = tokNode.getStr($(tokNode.getInt))
time = getTime()
@@ -86,6 +82,8 @@ proc fetchToken(): Future[Token] {.async.} =
if "Try again" notin e.msg:
echo "[tokens] fetching tokens paused, resuming in 30 minutes"
lastFailed = getTime()
+ finally:
+ client.close()
proc expired(token: Token): bool =
let time = getTime()
@@ -158,7 +156,6 @@ proc poolTokens*(amount: int) {.async.} =
tokenPool.add newToken
proc initTokenPool*(cfg: Config) {.async.} =
- clientPool = HttpPool()
enableLogging = cfg.enableDebug
while true:
diff --git a/src/types.nim b/src/types.nim
index 6f742d1..4dca5f0 100644
--- a/src/types.nim
+++ b/src/types.nim
@@ -7,19 +7,28 @@ genPrefsType()
type
RateLimitError* = object of CatchableError
InternalError* = object of CatchableError
+ BadClientError* = object of CatchableError
+
+ TimelineKind* {.pure.} = enum
+ tweets
+ replies
+ media
Api* {.pure.} = enum
tweetDetail
- userShow
+ tweetResult
timeline
search
- tweet
+ userSearch
list
listBySlug
listMembers
+ listTweets
userRestId
userScreenName
- status
+ userTweets
+ userTweetsAndReplies
+ userMedia
RateLimit* = object
remaining*: int
@@ -36,8 +45,10 @@ type
null = 0
noUserMatches = 17
protectedUser = 22
+ missingParams = 25
couldntAuth = 32
doesntExist = 34
+ invalidParam = 47
userNotFound = 50
suspended = 63
rateLimited = 88
@@ -150,6 +161,7 @@ type
imageDirectMessage = "image_direct_message"
audiospace = "audiospace"
newsletterPublication = "newsletter_publication"
+ hidden
unknown
Card* = object
diff --git a/src/views/renderutils.nim b/src/views/renderutils.nim
index cdfeb28..9dffdcb 100644
--- a/src/views/renderutils.nim
+++ b/src/views/renderutils.nim
@@ -59,8 +59,7 @@ proc buttonReferer*(action, text, path: string; class=""; `method`="post"): VNod
proc genCheckbox*(pref, label: string; state: bool): VNode =
buildHtml(label(class="pref-group checkbox-container")):
text label
- if state: input(name=pref, `type`="checkbox", checked="")
- else: input(name=pref, `type`="checkbox")
+ input(name=pref, `type`="checkbox", checked=state)
span(class="checkbox")
proc genInput*(pref, label, state, placeholder: string; class=""; autofocus=true): VNode =
@@ -68,20 +67,15 @@ proc genInput*(pref, label, state, placeholder: string; class=""; autofocus=true
buildHtml(tdiv(class=("pref-group pref-input " & class))):
if label.len > 0:
label(`for`=pref): text label
- if autofocus and state.len == 0:
- input(name=pref, `type`="text", placeholder=p, value=state, autofocus="")
- else:
- input(name=pref, `type`="text", placeholder=p, value=state)
+ input(name=pref, `type`="text", placeholder=p, value=state, autofocus=(autofocus and state.len == 0))
proc genSelect*(pref, label, state: string; options: seq[string]): VNode =
buildHtml(tdiv(class="pref-group pref-input")):
label(`for`=pref): text label
select(name=pref):
for opt in options:
- if opt == state:
- option(value=opt, selected=""): text opt
- else:
- option(value=opt): text opt
+ option(value=opt, selected=(opt == state)):
+ text opt
proc genDate*(pref, state: string): VNode =
buildHtml(span(class="date-input")):
@@ -93,12 +87,9 @@ proc genImg*(url: string; class=""): VNode =
img(src=getPicUrl(url), class=class, alt="")
proc getTabClass*(query: Query; tab: QueryKind): string =
- result = "tab-item"
- if query.kind == tab:
- result &= " active"
+ if query.kind == tab: "tab-item active"
+ else: "tab-item"
proc getAvatarClass*(prefs: Prefs): string =
- if prefs.squareAvatars:
- "avatar"
- else:
- "avatar round"
+ if prefs.squareAvatars: "avatar"
+ else: "avatar round"
diff --git a/src/views/search.nim b/src/views/search.nim
index 77ba14f..72c59f5 100644
--- a/src/views/search.nim
+++ b/src/views/search.nim
@@ -63,12 +63,10 @@ proc renderSearchPanel*(query: Query): VNode =
hiddenField("f", "tweets")
genInput("q", "", query.text, "Enter search...", class="pref-inline")
button(`type`="submit"): icon "search"
- if isPanelOpen(query):
- input(id="search-panel-toggle", `type`="checkbox", checked="")
- else:
- input(id="search-panel-toggle", `type`="checkbox")
- label(`for`="search-panel-toggle"):
- icon "down"
+
+ input(id="search-panel-toggle", `type`="checkbox", checked=isPanelOpen(query))
+ label(`for`="search-panel-toggle"): icon "down"
+
tdiv(class="search-panel"):
for f in @["filter", "exclude"]:
span(class="search-title"): text capitalize(f)
diff --git a/src/views/tweet.nim b/src/views/tweet.nim
index ea94e28..3338b71 100644
--- a/src/views/tweet.nim
+++ b/src/views/tweet.nim
@@ -106,14 +106,10 @@ proc renderVideo*(video: Video; prefs: Prefs; path: string): VNode =
else: vidUrl
case playbackType
of mp4:
- if prefs.muteVideos:
- video(poster=thumb, controls="", muted=""):
- source(src=source, `type`="video/mp4")
- else:
- video(poster=thumb, controls=""):
- source(src=source, `type`="video/mp4")
+ video(poster=thumb, controls="", muted=prefs.muteVideos):
+ source(src=source, `type`="video/mp4")
of m3u8, vmap:
- video(poster=thumb, data-url=source, data-autoload="false")
+ video(poster=thumb, data-url=source, data-autoload="false", muted=prefs.muteVideos)
verbatim "