Compare commits
No commits in common. "29e4b240d6d39655f1fed424b7fee4429df63dab" and "9fa03b99a2ff77b26859ffc65826cef0eba89e03" have entirely different histories.
29e4b240d6...9fa03b99a2
10 changed files with 100 additions and 281 deletions
@@ -1,33 +0,0 @@
-body {
-  --bg_color: #4C5844;
-  --fg_color: #CFCBC2;
-  --fg_faded: #999;
-  --fg_dark: var(--fg_faded);
-  --fg_nav: var(--accent);
-  --bg_panel: #4a4945;
-  --bg_elements: #2b2c2e;
-  --bg_overlays: #232323;
-  --bg_hover: #2A2A2A;
-  --grey: var(--fg_faded);
-  --dark_grey: #44475a;
-  --darker_grey: #3d4051;
-  --darkest_grey: #363948;
-  --border_grey: #666;
-  --accent: #8bc540;
-  --accent_light: #97bf63;
-  --accent_dark: var(--accent);
-  --accent_border: #797b78;
-  --play_button: #b0aeac;
-  --play_button_hover: #7e9bbf;
-  --more_replies_dots: #bd93f9;
-  --error_red: #ff5555;
-  --verified_blue: #415e81;
-  --icon_text: #F8F8F2;
-  --tab: #cbb784;
-  --tab_selected: var(--accent);
-  --profile_stat: #cbb784;
-}
-
-.search-bar > form input::placeholder{
-  color: var(--fg_faded);
-}

@@ -1,7 +1,7 @@
 # SPDX-License-Identifier: AGPL-3.0-only
-import httpclient, asyncdispatch, options, strutils, uri, times, tables
-import jsony, packedjson, zippy
-import types, tokens, consts, parserutils, http_pool
+import httpclient, asyncdispatch, options, strutils, uri, times, math, tables
+import jsony, packedjson, zippy, oauth1
+import types, auth, consts, parserutils, http_pool
 import experimental/types/common
 import config
 
@@ -30,12 +30,30 @@ proc genParams*(pars: openArray[(string, string)] = @[]; cursor="";
     else:
       result &= ("cursor", cursor)
 
-proc genHeaders*(token: Token = nil): HttpHeaders =
+proc getOauthHeader(url, oauthToken, oauthTokenSecret: string): string =
+  let
+    encodedUrl = url.replace(",", "%2C").replace("+", "%20")
+    params = OAuth1Parameters(
+      consumerKey: consumerKey,
+      signatureMethod: "HMAC-SHA1",
+      timestamp: $int(round(epochTime())),
+      nonce: "0",
+      isIncludeVersionToHeader: true,
+      token: oauthToken
+    )
+    signature = getSignature(HttpGet, encodedUrl, "", params, consumerSecret, oauthTokenSecret)
+
+  params.signature = percentEncode(signature)
+
+  return getOauth1RequestHeader(params)["authorization"]
+
+proc genHeaders*(url, oauthToken, oauthTokenSecret: string): HttpHeaders =
+  let header = getOauthHeader(url, oauthToken, oauthTokenSecret)
+
   result = newHttpHeaders({
     "connection": "keep-alive",
-    "authorization": auth,
+    "authorization": header,
     "content-type": "application/json",
-    "x-guest-token": if token == nil: "" else: token.tok,
     "x-twitter-active-user": "yes",
     "authority": "api.twitter.com",
     "accept-encoding": "gzip",
@@ -44,29 +62,25 @@ proc genHeaders*(token: Token = nil): HttpHeaders =
     "DNT": "1"
   })
 
-template updateToken() =
+template updateAccount() =
   if resp.headers.hasKey(rlRemaining):
     let
       remaining = parseInt(resp.headers[rlRemaining])
       reset = parseInt(resp.headers[rlReset])
-    token.setRateLimit(api, remaining, reset)
+    account.setRateLimit(api, remaining, reset)
 
 template fetchImpl(result, additional_headers, fetchBody) {.dirty.} =
   once:
     pool = HttpPool()
 
-  var token = await getToken(api)
-  if token.tok.len == 0:
+  var account = await getGuestAccount(api)
+  if account.oauthToken.len == 0:
+    echo "[accounts] Empty oauth token, account: ", account.id
    raise rateLimitError()
 
-  if len(cfg.cookieHeader) != 0:
-    additional_headers.add("Cookie", cfg.cookieHeader)
-  if len(cfg.xCsrfToken) != 0:
-    additional_headers.add("x-csrf-token", cfg.xCsrfToken)
-
  try:
    var resp: AsyncResponse
-    var headers = genHeaders(token)
+    var headers = genHeaders($url, account.oauthToken, account.oauthSecret)
    for key, value in additional_headers.pairs():
      headers.add(key, value)
    pool.use(headers):
@@ -87,7 +101,7 @@ template fetchImpl(result, additional_headers, fetchBody) {.dirty.} =
      let
        remaining = parseInt(resp.headers[rlRemaining])
        reset = parseInt(resp.headers[rlReset])
-      token.setRateLimit(api, remaining, reset)
+      account.setRateLimit(api, remaining, reset)
 
    if result.len > 0:
      if resp.headers.getOrDefault("content-encoding") == "gzip":
@@ -97,36 +111,34 @@ template fetchImpl(result, additional_headers, fetchBody) {.dirty.} =
        let errors = result.fromJson(Errors)
        if errors in {expiredToken, badToken, authorizationError}:
          echo "fetch error: ", errors
-          release(token, invalid=true)
+          invalidate(account)
          raise rateLimitError()
        elif errors in {rateLimited}:
          # rate limit hit, resets after 24 hours
-          #setLimited(account, api)
+          setLimited(account, api)
          raise rateLimitError()
      elif result.startsWith("429 Too Many Requests"):
-        echo "[accounts] 429 error, API: ", api, ", token: ", token[]
-        #account.apis[api].remaining = 0
+        echo "[accounts] 429 error, API: ", api, ", account: ", account.id
+        account.apis[api].remaining = 0
        # rate limit hit, resets after the 15 minute window
        raise rateLimitError()
 
    fetchBody
 
-    release(token, used=true)
-
    if resp.status == $Http400:
      raise newException(InternalError, $url)
  except InternalError as e:
    raise e
  except BadClientError as e:
-    release(token, used=true)
    raise e
  except OSError as e:
    raise e
  except Exception as e:
-    echo "error: ", e.name, ", msg: ", e.msg, ", token: ", token[], ", url: ", url
-    if "length" notin e.msg and "descriptor" notin e.msg:
-      release(token, invalid=true)
+    let id = if account.isNil: "null" else: $account.id
+    echo "error: ", e.name, ", msg: ", e.msg, ", accountId: ", id, ", url: ", url
    raise rateLimitError()
+  finally:
+    release(account)
 
 template retry(bod) =
  try:
@@ -136,6 +148,7 @@ template retry(bod) =
    bod
 
 proc fetch*(url: Uri; api: Api; additional_headers: HttpHeaders = newHttpHeaders()): Future[JsonNode] {.async.} =
+
  retry:
    var body: string
    fetchImpl(body, additional_headers):
@@ -145,12 +158,10 @@ proc fetch*(url: Uri; api: Api; additional_headers: HttpHeaders = newHttpHeaders()): Future[JsonNode] {.async.} =
        echo resp.status, ": ", body, " --- url: ", url
        result = newJNull()
 
-      updateToken()
-
      let error = result.getError
-      if error in {expiredToken, badToken}:
-        echo "fetch error: ", result.getError
-        release(token, invalid=true)
+      if error in {expiredToken, badToken, authorizationError}:
+        echo "fetchBody error: ", error
+        invalidate(account)
        raise rateLimitError()
 
 proc fetchRaw*(url: Uri; api: Api; additional_headers: HttpHeaders = newHttpHeaders()): Future[string] {.async.} =
@@ -159,12 +170,3 @@ proc fetchRaw*(url: Uri; api: Api; additional_headers: HttpHeaders = newHttpHeaders()): Future[string] {.async.} =
      if not (result.startsWith('{') or result.startsWith('[')):
        echo resp.status, ": ", result, " --- url: ", url
        result.setLen(0)
-
-      updateToken()
-
-      if result.startsWith("{\"errors"):
-        let errors = result.fromJson(Errors)
-        if errors in {expiredToken, badToken}:
-          echo "fetch error: ", errors
-          release(token, invalid=true)
-          raise rateLimitError()

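A note on how the new pieces fit together (not part of the diff): instead of sending the static Bearer value from consts plus an x-guest-token header, every request is now signed for its exact URL with a guest account's OAuth token pair via the oauth1 package, using the genHeaders shown above. A minimal sketch under those assumptions (the module above is assumed to be src/apiutils.nim; a plain synchronous client is used here instead of the async pool inside fetchImpl):

    # Sketch only: the account value is a placeholder; real accounts come from
    # the pool filled by initAccountPool (see the nitter.nim hunks below).
    import httpclient
    import types, apiutils   # GuestAccount and genHeaders from this branch

    proc signedGet(account: GuestAccount; url: string): string =
      # genHeaders signs this exact URL, so the same string must be requested.
      let headers = genHeaders(url, account.oauthToken, account.oauthSecret)
      let client = newHttpClient(headers = headers)
      defer: client.close()
      result = client.getContent(url)
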
@@ -41,9 +41,7 @@ proc getConfig*(path: string): (Config, parseCfg.Config) =
    enableRss: cfg.get("Config", "enableRSS", true),
    enableDebug: cfg.get("Config", "enableDebug", false),
    proxy: cfg.get("Config", "proxy", ""),
-    proxyAuth: cfg.get("Config", "proxyAuth", ""),
-    cookieHeader: cfg.get("Config", "cookieHeader", ""),
-    xCsrfToken: cfg.get("Config", "xCsrfToken", "")
+    proxyAuth: cfg.get("Config", "proxyAuth", "")
  )
 
  return (conf, cfg)

@@ -2,8 +2,6 @@
 import uri, sequtils, strutils
 
 const
-  auth* = "Bearer AAAAAAAAAAAAAAAAAAAAAFQODgEAAAAAVHTp76lzh3rFzcHbmHVvQxYYpTw%3DckAlMINMjmCwxUcaXbAN4XqJVdgMJaHqNOFgPMK0zN1qLqLQCF"
-
  consumerKey* = "3nVuSoBZnx6U4vzUxf5w"
  consumerSecret* = "Bcs59EFbbsdF6Sl9Ng71smgStWEGwXXKSjYvPVt7qys"
 

@@ -7,7 +7,7 @@ from os import getEnv
 
 import jester
 
-import types, config, prefs, formatters, redis_cache, http_pool
+import types, config, prefs, formatters, redis_cache, http_pool, auth
 import views/[general, about]
 import routes/[
  preferences, timeline, status, media, search, rss, list, debug,
@@ -16,9 +16,10 @@ import routes/[
 const instancesUrl = "https://github.com/zedeus/nitter/wiki/Instances"
 const issuesUrl = "https://github.com/zedeus/nitter/issues"
 
-#let accountsPath = getEnv("NITTER_ACCOUNTS_FILE", "./guest_accounts.json")
+let
+  accountsPath = getEnv("NITTER_ACCOUNTS_FILE", "./guest_accounts.json")
 
-# initAccountPool(cfg, accountsPath)
+initAccountPool(cfg, accountsPath)
 
 if not cfg.enableDebug:
  # Silence Jester's query warning
@@ -50,7 +51,7 @@ createSearchRouter(cfg)
 createMediaRouter(cfg)
 createEmbedRouter(cfg)
 createRssRouter(cfg)
-#createDebugRouter(cfg)
+createDebugRouter(cfg)
 
 settings:
  port = Port(cfg.port)
@@ -102,5 +103,5 @@ routes:
  extend preferences, ""
  extend resolver, ""
  extend embed, ""
-  #extend debug, ""
+  extend debug, ""
  extend unsupported, ""

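A note for operators (not part of the diff): accountsPath is read from the NITTER_ACCOUNTS_FILE environment variable and defaults to ./guest_accounts.json; initAccountPool presumably loads the OAuth token pairs produced by the twitter_oauth.sh script added at the end of this comparison, and those accounts are what getGuestAccount hands to the fetch code above.
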
@@ -1,17 +1,13 @@
 # SPDX-License-Identifier: AGPL-3.0-only
 import jester
 import router_utils
-import ".."/[tokens, types]
+import ".."/[auth, types]
 
 proc createDebugRouter*(cfg: Config) =
  router debug:
-    get "/.tokens":
+    get "/.health":
+      respJson getAccountPoolHealth()
+
+    get "/.accounts":
      cond cfg.enableDebug
-      respJson getPoolJson()
-
-    #get "/.health":
-      #respJson getAccountPoolHealth()
-
-    #get "/.accounts":
-      #cond cfg.enableDebug
-      #respJson getAccountPoolDebug()
+      respJson getAccountPoolDebug()

src/tokens.nim (168 lines, file deleted)
@@ -1,168 +0,0 @@
-# SPDX-License-Identifier: AGPL-3.0-only
-import asyncdispatch, httpclient, times, sequtils, json, random
-import strutils, tables
-import types, consts
-
-const
-  maxConcurrentReqs = 5 # max requests at a time per token, to avoid race conditions
-  maxLastUse = 1.hours # if a token is unused for 60 minutes, it expires
-  maxAge = 2.hours + 55.minutes # tokens expire after 3 hours
-  failDelay = initDuration(minutes=30)
-
-var
-  tokenPool: seq[Token]
-  lastFailed: Time
-  enableLogging = false
-
-let headers = newHttpHeaders({"authorization": auth})
-
-template log(str) =
-  if enableLogging: echo "[tokens] ", str
-
-proc getPoolJson*(): JsonNode =
-  var
-    list = newJObject()
-    totalReqs = 0
-    totalPending = 0
-    reqsPerApi: Table[string, int]
-
-  for token in tokenPool:
-    totalPending.inc(token.pending)
-    list[token.tok] = %*{
-      "apis": newJObject(),
-      "pending": token.pending,
-      "init": $token.init,
-      "lastUse": $token.lastUse
-    }
-
-    for api in token.apis.keys:
-      list[token.tok]["apis"][$api] = %token.apis[api]
-
-      let
-        maxReqs =
-          case api
-          of Api.photoRail: 180
-          #of Api.timeline: 187
-          #of Api.userTweets, Api.userTimeline: 300
-          of Api.userTweets: 300
-          of Api.listMembers, Api.listBySlug, Api.list, Api.listTweets,
-             Api.userTweetsAndReplies, Api.userMedia,
-             Api.userRestId, Api.userScreenName, Api.tweetDetail,
-             Api.tweetResult, Api.search, Api.favorites,
-             Api.retweeters, Api.favoriters, Api.following, Api.followers: 500
-          #of Api.userSearch: 900
-        reqs = maxReqs - token.apis[api].remaining
-
-      reqsPerApi[$api] = reqsPerApi.getOrDefault($api, 0) + reqs
-      totalReqs.inc(reqs)
-
-  return %*{
-    "amount": tokenPool.len,
-    "requests": totalReqs,
-    "pending": totalPending,
-    "apis": reqsPerApi,
-    "tokens": list
-  }
-
-proc rateLimitError*(): ref RateLimitError =
-  newException(RateLimitError, "rate limited")
-
-proc fetchToken(): Future[Token] {.async.} =
-  if getTime() - lastFailed < failDelay:
-    raise rateLimitError()
-
-  let client = newAsyncHttpClient(headers=headers)
-
-  try:
-    let
-      resp = await client.postContent(activate)
-      tokNode = parseJson(resp)["guest_token"]
-      tok = tokNode.getStr($(tokNode.getInt))
-      time = getTime()
-
-    return Token(tok: tok, init: time, lastUse: time)
-  except Exception as e:
-    echo "[tokens] fetching token failed: ", e.msg
-    if "Try again" notin e.msg:
-      echo "[tokens] fetching tokens paused, resuming in 30 minutes"
-      lastFailed = getTime()
-  finally:
-    client.close()
-
-proc expired(token: Token): bool =
-  let time = getTime()
-  token.init < time - maxAge or token.lastUse < time - maxLastUse
-
-proc isLimited(token: Token; api: Api): bool =
-  if token.isNil or token.expired:
-    return true
-
-  if api in token.apis:
-    let limit = token.apis[api]
-    return (limit.remaining <= 10 and limit.reset > epochTime().int)
-  else:
-    return false
-
-proc isReady(token: Token; api: Api): bool =
-  not (token.isNil or token.pending > maxConcurrentReqs or token.isLimited(api))
-
-proc release*(token: Token; used=false; invalid=false) =
-  if token.isNil: return
-  if invalid or token.expired:
-    if invalid: log "discarding invalid token"
-    elif token.expired: log "discarding expired token"
-
-    let idx = tokenPool.find(token)
-    if idx > -1: tokenPool.delete(idx)
-  elif used:
-    dec token.pending
-    token.lastUse = getTime()
-
-proc getToken*(api: Api): Future[Token] {.async.} =
-  for i in 0 ..< tokenPool.len:
-    if result.isReady(api): break
-    release(result)
-    result = tokenPool.sample()
-
-  if not result.isReady(api):
-    release(result)
-    result = await fetchToken()
-    log "added new token to pool"
-    tokenPool.add result
-
-  if not result.isNil:
-    inc result.pending
-  else:
-    raise rateLimitError()
-
-proc setRateLimit*(token: Token; api: Api; remaining, reset: int) =
-  # avoid undefined behavior in race conditions
-  if api in token.apis:
-    let limit = token.apis[api]
-    if limit.reset >= reset and limit.remaining < remaining:
-      return
-
-  token.apis[api] = RateLimit(remaining: remaining, reset: reset)
-
-proc poolTokens*(amount: int) {.async.} =
-  var futs: seq[Future[Token]]
-  for i in 0 ..< amount:
-    futs.add fetchToken()
-
-  for token in futs:
-    var newToken: Token
-
-    try: newToken = await token
-    except: discard
-
-    if not newToken.isNil:
-      log "added new token to pool"
-      tokenPool.add newToken
-
-proc initTokenPool*(cfg: Config) {.async.} =
-  enableLogging = cfg.enableDebug
-
-  while true:
-    if tokenPool.countIt(not it.isLimited(Api.userTweets)) < cfg.minTokens:
-      await poolTokens(min(4, cfg.minTokens - tokenPool.len))
-    await sleepAsync(2000)

@@ -38,13 +38,6 @@ type
    limited*: bool
    limitedAt*: int
 
-  Token* = ref object
-    tok*: string
-    init*: Time
-    lastUse*: Time
-    pending*: int
-    apis*: Table[Api, RateLimit]
-
  GuestAccount* = ref object
    id*: int64
    oauthToken*: string
@@ -281,8 +274,6 @@ type
    enableDebug*: bool
    proxy*: string
    proxyAuth*: string
-    cookieHeader*: string
-    xCsrfToken*: string
 
    rssCacheTime*: int
    listCacheTime*: int

@@ -44,7 +44,7 @@ proc renderHead*(prefs: Prefs; cfg: Config; req: Request; titleText=""; desc="";
    theme = req.params["theme"].toTheme
 
  let ogType =
-    if video.len > 0: "video.other"
+    if video.len > 0: "video"
    elif rss.len > 0: "object"
    elif images.len > 0: "photo"
    else: "article"
@@ -107,19 +107,17 @@ proc renderHead*(prefs: Prefs; cfg: Config; req: Request; titleText=""; desc="";
 
      let image = getUrlPrefix(cfg) & getPicUrl(url)
      meta(property="og:image", content=image)
-      if video.len == 0:
      meta(property="twitter:image:src", content=image)
 
      if rss.len > 0:
        meta(property="twitter:card", content="summary")
-      elif video.len == 0:
+      else:
        meta(property="twitter:card", content="summary_large_image")
 
    if video.len > 0:
-      let videoUrl = getUrlPrefix(cfg) & video
-      meta(property="og:video:url", content=videoUrl)
-      meta(property="og:video:secure_url", content=videoUrl)
-      meta(property="og:video:type", content="video/mp4")
+      meta(property="og:video:url", content=video)
+      meta(property="og:video:secure_url", content=video)
+      meta(property="og:video:type", content="text/html")
 
    # this is last so images are also preloaded
    # if this is done earlier, Chrome only preloads one image for some reason

twitter_oauth.sh (36 lines, new file)
@@ -0,0 +1,36 @@
+#!/bin/bash
+# Grab oauth token for use with Nitter (requires Twitter account).
+# results: {"oauth_token":"xxxxxxxxxx-xxxxxxxxx","oauth_token_secret":"xxxxxxxxxxxxxxxxxxxxx"}
+
+username=""
+password=""
+
+if [[ -z "$username" || -z "$password" ]]; then
+    echo "needs username and password"
+    exit 1
+fi
+
+bearer_token='AAAAAAAAAAAAAAAAAAAAAFXzAwAAAAAAMHCxpeSDG1gLNLghVe8d74hl6k4%3DRUMF4xAQLsbeBhTSRrCiQpJtxoGWeyHrDb5te2jpGskWDFW82F'
+guest_token=$(curl -s -XPOST https://api.twitter.com/1.1/guest/activate.json -H "Authorization: Bearer ${bearer_token}" | jq -r '.guest_token')
+base_url='https://api.twitter.com/1.1/onboarding/task.json'
+header=(-H "Authorization: Bearer ${bearer_token}" -H "User-Agent: TwitterAndroid/10.21.1" -H "Content-Type: application/json" -H "X-Guest-Token: ${guest_token}")
+
+# start flow
+flow_1=$(curl -si -XPOST "${base_url}?flow_name=login" "${header[@]}")
+
+# get 'att', now needed in headers, and 'flow_token' from flow_1
+att=$(sed -En 's/^att: (.*)\r/\1/p' <<< "${flow_1}")
+flow_token=$(sed -n '$p' <<< "${flow_1}" | jq -r .flow_token)
+
+# username
+token_2=$(curl -s -XPOST "${base_url}" -H "att: ${att}" "${header[@]}" \
+    -d '{"flow_token":"'"${flow_token}"'","subtask_inputs":[{"subtask_id":"LoginEnterUserIdentifierSSO","settings_list":{"setting_responses":[{"key":"user_identifier","response_data":{"text_data":{"result":"'"${username}"'"}}}],"link":"next_link"}}]}' | jq -r .flow_token)
+
+# password
+token_3=$(curl -s -XPOST "${base_url}" -H "att: ${att}" "${header[@]}" \
+    -d '{"flow_token":"'"${token_2}"'","subtask_inputs":[{"enter_password":{"password":"'"${password}"'","link":"next_link"},"subtask_id":"LoginEnterPassword"}]}' | jq -r .flow_token)
+
+# finally print oauth_token and secret
+curl -s -XPOST "${base_url}" -H "att: ${att}" "${header[@]}" \
+    -d '{"flow_token":"'"${token_3}"'","subtask_inputs":[{"check_logged_in_account":{"link":"AccountDuplicationCheck_false"},"subtask_id":"AccountDuplicationCheck"}]}' | \
+    jq -c '.subtasks[0]|if(.open_account) then {oauth_token: .open_account.oauth_token, oauth_token_secret: .open_account.oauth_token_secret} else empty end'
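The script prints one oauth_token / oauth_token_secret pair per run (see the "results" comment at its top). Those pairs are what the guest-account pool reads from the NITTER_ACCOUNTS_FILE path (default ./guest_accounts.json) set up in the nitter.nim hunk above. A plausible layout for that file, assuming the loader expects a JSON array of the objects the script prints:

    [
      {"oauth_token": "xxxxxxxxxx-xxxxxxxxx", "oauth_token_secret": "xxxxxxxxxxxxxxxxxxxxx"},
      {"oauth_token": "xxxxxxxxxx-xxxxxxxxx", "oauth_token_secret": "xxxxxxxxxxxxxxxxxxxxx"}
    ]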