Switch back to old user search endpoint
commit 8df5256c1d (parent 6e8744943f)
7 changed files with 28 additions and 34 deletions
src/api.nim (28 changes)

@@ -112,29 +112,25 @@ proc getGraphTweetSearch*(query: Query; after=""): Future[Timeline] {.async.} =
   if after.len > 0:
     variables["cursor"] = % after
   let url = graphSearchTimeline ? {"variables": $variables, "features": gqlFeatures}
-  result = parseGraphSearch[Tweets](await fetch(url, Api.search), after)
+  result = parseGraphSearch(await fetch(url, Api.search), after)
   result.query = query

-proc getGraphUserSearch*(query: Query; after=""): Future[Result[User]] {.async.} =
+proc getUserSearch*(query: Query; page="1"): Future[Result[User]] {.async.} =
   if query.text.len == 0:
     return Result[User](query: query, beginning: true)

-  var
-    variables = %*{
-      "rawQuery": query.text,
-      "count": 20,
-      "product": "People",
-      "withDownvotePerspective": false,
-      "withReactionsMetadata": false,
-      "withReactionsPerspective": false
-    }
-  if after.len > 0:
-    variables["cursor"] = % after
-    result.beginning = false
+  let
+    page = if page.len == 0: "1" else: page
+    url = userSearch ? genParams({"q": query.text, "skip_status": "1", "page": page})
+    js = await fetchRaw(url, Api.userSearch)
+
+  result = parseUsers(js)

-  let url = graphSearchTimeline ? {"variables": $variables, "features": gqlFeatures}
-  result = parseGraphSearch[User](await fetch(url, Api.search), after)
   result.query = query
+  if page.len == 0:
+    result.bottom = "2"
+  elif page.allCharsInSet(Digits):
+    result.bottom = $(parseInt(page) + 1)

 proc getPhotoRail*(name: string): Future[PhotoRail] {.async.} =
   if name.len == 0: return
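For reference, a minimal standalone sketch of what the restored getUserSearch assembles: the legacy 1.1/users/search.json URL with page-number pagination, where the "bottom cursor" is simply the next page number. This uses only std/uri and std/strutils; buildUserSearchUrl and nextBottom are made-up names, and Nitter's own genParams additionally merges the shared default parameters from consts.nim.

# Illustrative sketch, not Nitter's code.
import std/[uri, strutils]

let userSearch = parseUri("https://api.twitter.com") / "1.1/users/search.json"

proc buildUserSearchUrl(q: string; page = "1"): string =
  ## Query text plus skip_status and a 1-based page number.
  $(userSearch ? {"q": q, "skip_status": "1", "page": page})

proc nextBottom(page: string): string =
  ## Mirrors the result.bottom logic above: the "cursor" is just the next page.
  if page.len == 0: "2"
  elif page.allCharsInSet(Digits): $(parseInt(page) + 1)
  else: ""

echo buildUserSearchUrl("nim lang", "2")
# -> https://api.twitter.com/1.1/users/search.json?q=nim+lang&skip_status=1&page=2
echo nextBottom("2")  # 3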
@@ -9,6 +9,7 @@ const
   activate* = $(api / "1.1/guest/activate.json")

   photoRail* = api / "1.1/statuses/media_timeline.json"
+  userSearch* = api / "1.1/users/search.json"

   graphql = api / "graphql"
   graphUser* = graphql / "u7wQyGi6oExe8_TRWGMq4Q/UserResultByScreenNameQuery"

@@ -34,6 +35,7 @@ const
     "include_user_entities": "1",
     "include_ext_reply_count": "1",
     "include_ext_is_blue_verified": "1",
+    #"include_ext_verified_type": "1",
     "include_ext_media_color": "0",
     "cards_platform": "Web-13",
     "tweet_mode": "extended",
@@ -56,7 +56,7 @@ proc toUser*(raw: RawUser): User =
     tweets: raw.statusesCount,
     likes: raw.favouritesCount,
     media: raw.mediaCount,
-    verified: raw.verified,
+    verified: raw.verified or raw.extIsBlueVerified,
     protected: raw.protected,
     joinDate: parseTwitterDate(raw.createdAt),
     banner: getBanner(raw),

@@ -16,6 +16,7 @@ type
     statusesCount*: int
     mediaCount*: int
     verified*: bool
+    extIsBlueVerified*: bool
     protected*: bool
     profileLinkColor*: string
     profileBannerUrl*: string
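A toy illustration of the mapping change above, with simplified stand-in types (not the real RawUser/User objects): legacy-verified and Blue-verified accounts both end up with verified = true.

# Toy stand-in types for illustration only.
type
  RawUserLite = object
    verified, extIsBlueVerified: bool
  UserLite = object
    verified: bool

proc toUserLite(raw: RawUserLite): UserLite =
  # Same rule as toUser above: either flag marks the account as verified.
  UserLite(verified: raw.verified or raw.extIsBlueVerified)

assert toUserLite(RawUserLite(extIsBlueVerified: true)).verified
assert toUserLite(RawUserLite(verified: true)).verified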
@@ -1,5 +1,5 @@
 # SPDX-License-Identifier: AGPL-3.0-only
-import strutils, options, times, math, tables
+import strutils, options, times, math
 import packedjson, packedjson/deserialiser
 import types, parserutils, utils
 import experimental/parser/unifiedcard

@@ -436,8 +436,8 @@ proc parseGraphTimeline*(js: JsonNode; root: string; after=""): Profile =
           tweet.id = parseBiggestInt(entryId)
         result.pinned = some tweet

-proc parseGraphSearch*[T: User | Tweets](js: JsonNode; after=""): Result[T] =
-  result = Result[T](beginning: after.len == 0)
+proc parseGraphSearch*(js: JsonNode; after=""): Timeline =
+  result = Timeline(beginning: after.len == 0)

   let instructions = js{"data", "search_by_raw_query", "search_timeline", "timeline", "instructions"}
   if instructions.len == 0:

@@ -448,19 +448,13 @@ proc parseGraphSearch*[T: User | Tweets](js: JsonNode; after=""): Result[T] =
     if typ == "TimelineAddEntries":
       for e in instruction{"entries"}:
         let entryId = e{"entryId"}.getStr
-        when T is Tweets:
-          if entryId.startsWith("tweet"):
-            with tweetRes, e{"content", "itemContent", "tweet_results", "result"}:
-              let tweet = parseGraphTweet(tweetRes)
-              if not tweet.available:
-                tweet.id = parseBiggestInt(entryId.getId())
-              result.content.add tweet
-        elif T is User:
-          if entryId.startsWith("user"):
-            with userRes, e{"content", "itemContent"}:
-              result.content.add parseGraphUser(userRes)
-
-        if entryId.startsWith("cursor-bottom"):
+        if entryId.startsWith("tweet"):
+          with tweetRes, e{"content", "itemContent", "tweet_results", "result"}:
+            let tweet = parseGraphTweet(tweetRes)
+            if not tweet.available:
+              tweet.id = parseBiggestInt(entryId.getId())
+            result.content.add tweet
+        elif entryId.startsWith("cursor-bottom"):
           result.bottom = e{"content", "value"}.getStr
     elif typ == "TimelineReplaceEntry":
       if instruction{"entry_id_to_replace"}.getStr.startsWith("cursor-bottom"):
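To make the shape of the payload concrete, a standalone sketch of the walk the tweet-only parseGraphSearch performs over "TimelineAddEntries", collecting tweet entries and the bottom cursor. It uses std/json rather than Nitter's packedjson, and the sample JSON is invented for illustration.

# Illustrative sketch with an invented payload.
import std/[json, strutils]

let js = parseJson("""
{"data": {"search_by_raw_query": {"search_timeline": {"timeline": {"instructions": [
  {"type": "TimelineAddEntries", "entries": [
    {"entryId": "tweet-1690000000000000000", "content": {}},
    {"entryId": "cursor-bottom-0", "content": {"value": "scroll-cursor"}}
  ]}
]}}}}}
""")

var
  tweetIds: seq[string]
  bottom = ""

for instruction in js{"data", "search_by_raw_query", "search_timeline",
                      "timeline", "instructions"}:
  if instruction{"type"}.getStr == "TimelineAddEntries":
    for e in instruction{"entries"}:
      let entryId = e{"entryId"}.getStr
      if entryId.startsWith("tweet"):
        tweetIds.add entryId          # the real parser builds Tweet objects here
      elif entryId.startsWith("cursor-bottom"):
        bottom = e{"content", "value"}.getStr

echo tweetIds   # @["tweet-1690000000000000000"]
echo bottom     # scroll-cursor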
@@ -29,7 +29,7 @@ proc createSearchRouter*(cfg: Config) =
           redirect("/" & q)
         var users: Result[User]
         try:
-          users = await getGraphUserSearch(query, getCursor())
+          users = await getUserSearch(query, getCursor())
         except InternalError:
           users = Result[User](beginning: true, query: query)
         resp renderMain(renderUserSearch(users, prefs), request, cfg, prefs, title)
@@ -19,6 +19,7 @@ type
     tweetResult
     photoRail
     search
+    userSearch
     list
     listBySlug
     listMembers