Merge remote-tracking branch 'privacydevel/master'

commit 4cf25d20c7

11 changed files with 87 additions and 25 deletions

README.md (10)

@@ -33,6 +33,10 @@ XMR: 42hKayRoEAw4D6G6t8mQHPJHQcXqofjFuVfavqKeNMNUZfeJLJAcNU19i1bGdDvcdN6romiSscW
 - Archiving tweets/profiles
 - Developer API
 
+## New Features
+
+- Likes tab
+
 ## Resources
 
 The wiki contains

@@ -99,6 +103,12 @@ $ nimble md
 $ cp nitter.example.conf nitter.conf
 ```
 
+Edit `twitter_oauth.sh` with your Twitter account name and password.
+
+```
+$ ./twitter_oauth.sh | tee -a guest_accounts.jsonl
+```
+
 Set your hostname, port, HMAC key, https (must be correct for cookies), and
 Redis info in `nitter.conf`. To run Redis, either run
 `redis-server --daemonize yes`, or `systemctl enable --now redis` (or
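
As an aside on the setup step added above: each run of `twitter_oauth.sh` appends one JSON object to `guest_accounts.jsonl`. A quick sanity check along these lines (an assumption of this note, not something the commit ships) confirms that every collected line parses and carries both OAuth fields:

```sh
# Assumed sanity check, not part of this diff: every line appended to
# guest_accounts.jsonl should be one JSON object containing the oauth_token
# and oauth_token_secret printed by twitter_oauth.sh.
jq -e '.oauth_token and .oauth_token_secret' guest_accounts.jsonl
```

If any line prints `false` or `jq` reports a parse error, that account entry should be regenerated before starting Nitter.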

nitter.example.conf

@@ -33,9 +33,6 @@ tokenCount = 10
 # always at least `tokenCount` usable tokens. only increase this if you receive
 # major bursts all the time and don't have a rate limiting setup via e.g. nginx
 
-#cookieHeader = "ct0=XXXXXXXXXXXXXXXXX; auth_token=XXXXXXXXXXXXXX" # authentication cookie of a logged in account, required for the likes tab and NSFW content
-#xCsrfToken = "XXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX" # required for the likes tab and NSFW content
-
 # Change default preferences here, see src/prefs_impl.nim for a complete list
 [Preferences]
 theme = "Nitter"

screenshot.png (BIN)

Binary file not shown. Size before: 957 KiB, after: 797 KiB.

src/api.nim (18)

@@ -71,10 +71,20 @@ proc getGraphListMembers*(list: List; after=""): Future[Result[User]] {.async.}
 
 proc getFavorites*(id: string; cfg: Config; after=""): Future[Profile] {.async.} =
   if id.len == 0: return
-  let
-    ps = genParams({"userId": id}, after)
-    url = consts.favorites / (id & ".json") ? ps
-  result = parseTimeline(await fetch(url, Api.favorites), after)
+  var
+    variables = %*{
+      "userId": id,
+      "includePromotedContent":false,
+      "withClientEventToken":false,
+      "withBirdwatchNotes":false,
+      "withVoice":true,
+      "withV2Timeline":false
+    }
+  if after.len > 0:
+    variables["cursor"] = % after
+  let
+    url = consts.favorites ? {"variables": $variables, "features": gqlFeatures}
+  result = parseGraphTimeline(await fetch(url, Api.favorites), after)
 
 proc getGraphTweetResult*(id: string): Future[Tweet] {.async.} =
   if id.len == 0: return
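
For reference, the request that the new `getFavorites` builds can be approximated with curl. The Likes endpoint ID and the `variables`/`features` query parameters are taken from this commit (see the `src/consts.nim` hunk further down); the `https://api.twitter.com` base URL, the placeholder bearer token, and the per-account OAuth signature that `genHeaders`/`fetch` normally supply are assumptions of this sketch, so the bare call may well be rejected. It only illustrates the request shape:

```sh
# Hedged sketch of the GraphQL Likes request shape only; the real per-account
# OAuth authorization is added by Nitter's fetch()/genHeaders() and is omitted here.
bearer_token='AAAAAAAAAA...'           # placeholder, e.g. the token used in twitter_oauth.sh
features="$(cat gql_features.json)"    # hypothetical file holding the gqlFeatures JSON from src/consts.nim
variables='{"userId":"123","includePromotedContent":false,"withClientEventToken":false,"withBirdwatchNotes":false,"withVoice":true,"withV2Timeline":false}'
curl -sG "https://api.twitter.com/graphql/eSSNbhECHHWWALkkQq-YTA/Likes" \
  --data-urlencode "variables=${variables}" \
  --data-urlencode "features=${features}" \
  -H "Authorization: Bearer ${bearer_token}"
```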

src/apiutils.nim

@@ -49,7 +49,7 @@ proc getOauthHeader(url, oauthToken, oauthTokenSecret: string): string =
 
 proc genHeaders*(url, oauthToken, oauthTokenSecret: string): HttpHeaders =
   let header = getOauthHeader(url, oauthToken, oauthTokenSecret)
 
   result = newHttpHeaders({
     "connection": "keep-alive",
     "authorization": header,

@@ -149,11 +149,6 @@ template retry(bod) =
 
 proc fetch*(url: Uri; api: Api; additional_headers: HttpHeaders = newHttpHeaders()): Future[JsonNode] {.async.} =
 
-  if len(cfg.cookieHeader) != 0:
-    additional_headers.add("Cookie", cfg.cookieHeader)
-  if len(cfg.xCsrfToken) != 0:
-    additional_headers.add("x-csrf-token", cfg.xCsrfToken)
-
   retry:
     var body: string
     fetchImpl(body, additional_headers):

src/config.nim

@@ -41,9 +41,7 @@ proc getConfig*(path: string): (Config, parseCfg.Config) =
     enableRss: cfg.get("Config", "enableRSS", true),
     enableDebug: cfg.get("Config", "enableDebug", false),
     proxy: cfg.get("Config", "proxy", ""),
-    proxyAuth: cfg.get("Config", "proxyAuth", ""),
-    cookieHeader: cfg.get("Config", "cookieHeader", ""),
-    xCsrfToken: cfg.get("Config", "xCsrfToken", "")
+    proxyAuth: cfg.get("Config", "proxyAuth", "")
   )
 
   return (conf, cfg)

src/consts.nim

@@ -11,7 +11,6 @@ const
   photoRail* = api / "1.1/statuses/media_timeline.json"
 
   timelineApi = api / "2/timeline"
-  favorites* = timelineApi / "favorites"
 
   graphql = api / "graphql"
   graphUser* = graphql / "u7wQyGi6oExe8_TRWGMq4Q/UserResultByScreenNameQuery"

@@ -30,6 +29,7 @@ const
   graphRetweeters* = graphql / "RCR9gqwYD1NEgi9FWzA50A/Retweeters"
   graphFollowers* = graphql / "EAqBhgcGr_qPOzhS4Q3scQ/Followers"
   graphFollowing* = graphql / "JPZiqKjET7_M1r5Tlr8pyA/Following"
+  favorites* = graphql / "eSSNbhECHHWWALkkQq-YTA/Likes"
 
   timelineParams* = {
     "include_can_media_tag": "1",

@@ -50,6 +50,7 @@ const
   gqlFeatures* = """{
   "android_graphql_skip_api_media_color_palette": false,
   "blue_business_profile_image_shape_enabled": false,
+  "c9s_tweet_anatomy_moderator_badge_enabled": false,
   "creator_subscriptions_subscription_count_enabled": false,
   "creator_subscriptions_tweet_preview_api_enabled": true,
   "freedom_of_speech_not_reach_fetch_enabled": false,

@@ -71,6 +72,7 @@ const
   "responsive_web_twitter_article_tweet_consumption_enabled": false,
   "responsive_web_twitter_blue_verified_badge_is_enabled": true,
   "rweb_lists_timeline_redesign_enabled": true,
+  "rweb_video_timestamps_enabled": true,
   "spaces_2022_h2_clipping": true,
   "spaces_2022_h2_spaces_communities": true,
   "standardized_nudges_misinfo": false,

src/parser.nim

@@ -33,7 +33,8 @@ proc parseGraphUser(js: JsonNode): User =
   var user = js{"user_result", "result"}
   if user.isNull:
     user = ? js{"user_results", "result"}
-  result = parseUser(user{"legacy"}, user{"rest_id"}.getStr)
+
+  result = parseUser(user{"legacy"})
 
   if result.verifiedType == VerifiedType.none and user{"is_blue_verified"}.getBool(false):
     result.verifiedType = blue

@@ -534,7 +535,8 @@ proc parseGraphTimeline*(js: JsonNode; root: string; after=""): Profile =
 
   let instructions =
     if root == "list": ? js{"data", "list", "timeline_response", "timeline", "instructions"}
-    else: ? js{"data", "user_result", "result", "timeline_response", "timeline", "instructions"}
+    elif root == "user": ? js{"data", "user_result", "result", "timeline_response", "timeline", "instructions"}
+    else: ? js{"data", "user", "result", "timeline", "timeline", "instructions"}
 
   if instructions.len == 0:
     return

@@ -554,6 +556,21 @@ proc parseGraphTimeline*(js: JsonNode; root: string; after=""): Profile =
           result.tweets.content.add thread.content
         elif entryId.startsWith("cursor-bottom"):
           result.tweets.bottom = e{"content", "value"}.getStr
+    # TODO cleanup
+    if i{"type"}.getStr == "TimelineAddEntries":
+      for e in i{"entries"}:
+        let entryId = e{"entryId"}.getStr
+        if entryId.startsWith("tweet"):
+          with tweetResult, e{"content", "itemContent", "tweet_results", "result"}:
+            let tweet = parseGraphTweet(tweetResult, false)
+            if not tweet.available:
+              tweet.id = parseBiggestInt(entryId.getId())
+            result.tweets.content.add tweet
+        elif "-conversation-" in entryId or entryId.startsWith("homeConversation"):
+          let (thread, self) = parseGraphThread(e)
+          result.tweets.content.add thread.content
+        elif entryId.startsWith("cursor-bottom"):
+          result.tweets.bottom = e{"content", "value"}.getStr
     if after.len == 0 and i{"__typename"}.getStr == "TimelinePinEntry":
       with tweetResult, i{"entry", "content", "content", "tweetResult", "result"}:
         let tweet = parseGraphTweet(tweetResult, false)

src/types.nim

@@ -282,9 +282,7 @@ type
     redisConns*: int
     redisMaxConns*: int
     redisPassword*: string
-
-    cookieHeader*: string
-    xCsrfToken*: string
+    redisDb*: int
 
   Rss* = object
     feed*, cursor*: string

src/views/profile.nim

@@ -38,9 +38,8 @@ proc renderProfileTabs*(query: Query; username: string; cfg: Config): VNode =
       a(href=(link & "/with_replies")): text "Tweets & Replies"
     li(class=query.getTabClass(media)):
       a(href=(link & "/media")): text "Media"
-    if len(cfg.xCsrfToken) != 0 and len(cfg.cookieHeader) != 0:
-      li(class=query.getTabClass(favorites)):
-        a(href=(link & "/favorites")): text "Likes"
+    li(class=query.getTabClass(favorites)):
+      a(href=(link & "/favorites")): text "Likes"
     li(class=query.getTabClass(tweets)):
       a(href=(link & "/search")): text "Search"
 

twitter_oauth.sh (36, new file)

@@ -0,0 +1,36 @@
+#!/bin/bash
+# Grab oauth token for use with Nitter (requires Twitter account).
+# results: {"oauth_token":"xxxxxxxxxx-xxxxxxxxx","oauth_token_secret":"xxxxxxxxxxxxxxxxxxxxx"}
+
+username=""
+password=""
+
+if [[ -z "$username" || -z "$password" ]]; then
+    echo "needs username and password"
+    exit 1
+fi
+
+bearer_token='AAAAAAAAAAAAAAAAAAAAAFXzAwAAAAAAMHCxpeSDG1gLNLghVe8d74hl6k4%3DRUMF4xAQLsbeBhTSRrCiQpJtxoGWeyHrDb5te2jpGskWDFW82F'
+guest_token=$(curl -s -XPOST https://api.twitter.com/1.1/guest/activate.json -H "Authorization: Bearer ${bearer_token}" | jq -r '.guest_token')
+base_url='https://api.twitter.com/1.1/onboarding/task.json'
+header=(-H "Authorization: Bearer ${bearer_token}" -H "User-Agent: TwitterAndroid/10.21.1" -H "Content-Type: application/json" -H "X-Guest-Token: ${guest_token}")
+
+# start flow
+flow_1=$(curl -si -XPOST "${base_url}?flow_name=login" "${header[@]}")
+
+# get 'att', now needed in headers, and 'flow_token' from flow_1
+att=$(sed -En 's/^att: (.*)\r/\1/p' <<< "${flow_1}")
+flow_token=$(sed -n '$p' <<< "${flow_1}" | jq -r .flow_token)
+
+# username
+token_2=$(curl -s -XPOST "${base_url}" -H "att: ${att}" "${header[@]}" \
+    -d '{"flow_token":"'"${flow_token}"'","subtask_inputs":[{"subtask_id":"LoginEnterUserIdentifierSSO","settings_list":{"setting_responses":[{"key":"user_identifier","response_data":{"text_data":{"result":"'"${username}"'"}}}],"link":"next_link"}}]}' | jq -r .flow_token)
+
+# password
+token_3=$(curl -s -XPOST "${base_url}" -H "att: ${att}" "${header[@]}" \
+    -d '{"flow_token":"'"${token_2}"'","subtask_inputs":[{"enter_password":{"password":"'"${password}"'","link":"next_link"},"subtask_id":"LoginEnterPassword"}]}' | jq -r .flow_token)
+
+# finally print oauth_token and secret
+curl -s -XPOST "${base_url}" -H "att: ${att}" "${header[@]}" \
+    -d '{"flow_token":"'"${token_3}"'","subtask_inputs":[{"check_logged_in_account":{"link":"AccountDuplicationCheck_false"},"subtask_id":"AccountDuplicationCheck"}]}' | \
+    jq -c '.subtasks[0]|if(.open_account) then {oauth_token: .open_account.oauth_token, oauth_token_secret: .open_account.oauth_token_secret} else empty end'
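
The script expects credentials to be filled into the empty `username=""` and `password=""` assignments at the top. A hypothetical wrapper (not part of this commit) can substitute them into a temporary copy instead, which is handy when collecting several guest accounts:

```sh
#!/bin/bash
# Hypothetical helper, not part of this commit: run twitter_oauth.sh with the
# credentials given on the command line by filling in the empty username=""
# and password="" assignments in a temporary copy of the script.
# Note: credentials containing '/', '&' or '"' would break the sed patterns.
set -euo pipefail
user="$1"
pass="$2"
tmp="$(mktemp)"
sed -e "s/^username=\"\"/username=\"${user}\"/" \
    -e "s/^password=\"\"/password=\"${pass}\"/" twitter_oauth.sh > "${tmp}"
bash "${tmp}" | tee -a guest_accounts.jsonl
rm -f "${tmp}"
```

Saved as, say, `add_account.sh` (a made-up name), it would be invoked as `./add_account.sh someuser 'somepassword'`.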