Compare commits: 9fa03b99a2 ... 29e4b240d6
3 commits: 29e4b240d6, 3bff6ec415, 5c6b229500

10 changed files with 281 additions and 100 deletions
public/css/themes/src.css (new file, +33)
@@ -0,0 +1,33 @@
+body {
+  --bg_color: #4C5844;
+  --fg_color: #CFCBC2;
+  --fg_faded: #999;
+  --fg_dark: var(--fg_faded);
+  --fg_nav: var(--accent);
+  --bg_panel: #4a4945;
+  --bg_elements: #2b2c2e;
+  --bg_overlays: #232323;
+  --bg_hover: #2A2A2A;
+  --grey: var(--fg_faded);
+  --dark_grey: #44475a;
+  --darker_grey: #3d4051;
+  --darkest_grey: #363948;
+  --border_grey: #666;
+  --accent: #8bc540;
+  --accent_light: #97bf63;
+  --accent_dark: var(--accent);
+  --accent_border: #797b78;
+  --play_button: #b0aeac;
+  --play_button_hover: #7e9bbf;
+  --more_replies_dots: #bd93f9;
+  --error_red: #ff5555;
+  --verified_blue: #415e81;
+  --icon_text: #F8F8F2;
+  --tab: #cbb784;
+  --tab_selected: var(--accent);
+  --profile_stat: #cbb784;
+}
+
+.search-bar > form input::placeholder {
+  color: var(--fg_faded);
+}
src/api.nim

@@ -1,7 +1,7 @@
 # SPDX-License-Identifier: AGPL-3.0-only
-import httpclient, asyncdispatch, options, strutils, uri, times, math, tables
-import jsony, packedjson, zippy, oauth1
-import types, auth, consts, parserutils, http_pool
+import httpclient, asyncdispatch, options, strutils, uri, times, tables
+import jsony, packedjson, zippy
+import types, tokens, consts, parserutils, http_pool
 import experimental/types/common
 import config
@@ -30,30 +30,12 @@ proc genParams*(pars: openArray[(string, string)] = @[]; cursor="";
     else:
       result &= ("cursor", cursor)
 
-proc getOauthHeader(url, oauthToken, oauthTokenSecret: string): string =
-  let
-    encodedUrl = url.replace(",", "%2C").replace("+", "%20")
-    params = OAuth1Parameters(
-      consumerKey: consumerKey,
-      signatureMethod: "HMAC-SHA1",
-      timestamp: $int(round(epochTime())),
-      nonce: "0",
-      isIncludeVersionToHeader: true,
-      token: oauthToken
-    )
-    signature = getSignature(HttpGet, encodedUrl, "", params, consumerSecret, oauthTokenSecret)
-
-  params.signature = percentEncode(signature)
-
-  return getOauth1RequestHeader(params)["authorization"]
-
-proc genHeaders*(url, oauthToken, oauthTokenSecret: string): HttpHeaders =
-  let header = getOauthHeader(url, oauthToken, oauthTokenSecret)
-
+proc genHeaders*(token: Token = nil): HttpHeaders =
   result = newHttpHeaders({
     "connection": "keep-alive",
-    "authorization": header,
+    "authorization": auth,
     "content-type": "application/json",
+    "x-guest-token": if token == nil: "" else: token.tok,
     "x-twitter-active-user": "yes",
     "authority": "api.twitter.com",
     "accept-encoding": "gzip",
@@ -62,25 +44,29 @@ proc genHeaders*(url, oauthToken, oauthTokenSecret: string): HttpHeaders =
     "DNT": "1"
   })
 
-template updateAccount() =
+template updateToken() =
   if resp.headers.hasKey(rlRemaining):
     let
       remaining = parseInt(resp.headers[rlRemaining])
       reset = parseInt(resp.headers[rlReset])
-    account.setRateLimit(api, remaining, reset)
+    token.setRateLimit(api, remaining, reset)
 
 template fetchImpl(result, additional_headers, fetchBody) {.dirty.} =
   once:
     pool = HttpPool()
 
-  var account = await getGuestAccount(api)
-  if account.oauthToken.len == 0:
-    echo "[accounts] Empty oauth token, account: ", account.id
+  var token = await getToken(api)
+  if token.tok.len == 0:
     raise rateLimitError()
 
+  if len(cfg.cookieHeader) != 0:
+    additional_headers.add("Cookie", cfg.cookieHeader)
+  if len(cfg.xCsrfToken) != 0:
+    additional_headers.add("x-csrf-token", cfg.xCsrfToken)
+
   try:
     var resp: AsyncResponse
-    var headers = genHeaders($url, account.oauthToken, account.oauthSecret)
+    var headers = genHeaders(token)
     for key, value in additional_headers.pairs():
       headers.add(key, value)
     pool.use(headers):
@@ -101,7 +87,7 @@ template fetchImpl(result, additional_headers, fetchBody) {.dirty.} =
       let
         remaining = parseInt(resp.headers[rlRemaining])
         reset = parseInt(resp.headers[rlReset])
-      account.setRateLimit(api, remaining, reset)
+      token.setRateLimit(api, remaining, reset)
 
     if result.len > 0:
       if resp.headers.getOrDefault("content-encoding") == "gzip":
@@ -111,34 +97,36 @@ template fetchImpl(result, additional_headers, fetchBody) {.dirty.} =
         let errors = result.fromJson(Errors)
         if errors in {expiredToken, badToken, authorizationError}:
           echo "fetch error: ", errors
-          invalidate(account)
+          release(token, invalid=true)
           raise rateLimitError()
         elif errors in {rateLimited}:
           # rate limit hit, resets after 24 hours
-          setLimited(account, api)
+          #setLimited(account, api)
           raise rateLimitError()
       elif result.startsWith("429 Too Many Requests"):
-        echo "[accounts] 429 error, API: ", api, ", account: ", account.id
-        account.apis[api].remaining = 0
+        echo "[accounts] 429 error, API: ", api, ", token: ", token[]
+        #account.apis[api].remaining = 0
         # rate limit hit, resets after the 15 minute window
         raise rateLimitError()
 
     fetchBody
 
+    release(token, used=true)
+
     if resp.status == $Http400:
       raise newException(InternalError, $url)
   except InternalError as e:
     raise e
   except BadClientError as e:
+    release(token, used=true)
     raise e
   except OSError as e:
     raise e
   except Exception as e:
-    let id = if account.isNil: "null" else: $account.id
-    echo "error: ", e.name, ", msg: ", e.msg, ", accountId: ", id, ", url: ", url
+    echo "error: ", e.name, ", msg: ", e.msg, ", token: ", token[], ", url: ", url
+    if "length" notin e.msg and "descriptor" notin e.msg:
+      release(token, invalid=true)
     raise rateLimitError()
-  finally:
-    release(account)
 
 template retry(bod) =
   try:
@@ -148,7 +136,6 @@ template retry(bod) =
       bod
 
 proc fetch*(url: Uri; api: Api; additional_headers: HttpHeaders = newHttpHeaders()): Future[JsonNode] {.async.} =
-
   retry:
     var body: string
     fetchImpl(body, additional_headers):
@@ -158,10 +145,12 @@ proc fetch*(url: Uri; api: Api; additional_headers: HttpHeaders = newHttpHeaders
         echo resp.status, ": ", body, " --- url: ", url
         result = newJNull()
 
+      updateToken()
+
       let error = result.getError
-      if error in {expiredToken, badToken, authorizationError}:
-        echo "fetchBody error: ", error
-        invalidate(account)
+      if error in {expiredToken, badToken}:
+        echo "fetch error: ", result.getError
+        release(token, invalid=true)
         raise rateLimitError()
 
 proc fetchRaw*(url: Uri; api: Api; additional_headers: HttpHeaders = newHttpHeaders()): Future[string] {.async.} =
@@ -170,3 +159,12 @@ proc fetchRaw*(url: Uri; api: Api; additional_headers: HttpHeaders = newHttpHead
       if not (result.startsWith('{') or result.startsWith('[')):
         echo resp.status, ": ", result, " --- url: ", url
         result.setLen(0)
+
+      updateToken()
+
+      if result.startsWith("{\"errors"):
+        let errors = result.fromJson(Errors)
+        if errors in {expiredToken, badToken}:
+          echo "fetch error: ", errors
+          release(token, invalid=true)
+          raise rateLimitError()
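Note on the api.nim changes above: requests are no longer OAuth1-signed with per-account credentials; they carry the static bearer constant plus a guest token, and fetchImpl can attach the configured Cookie and x-csrf-token values. Illustratively, an outgoing request would carry roughly the following headers (names taken from the hunks above, values are placeholders):

    authorization: <the Bearer string `auth` from consts.nim>
    x-guest-token: <token.tok from the token pool>
    content-type: application/json
    cookie: <cfg.cookieHeader, only when set>
    x-csrf-token: <cfg.xCsrfToken, only when set>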
src/config.nim

@@ -41,7 +41,9 @@ proc getConfig*(path: string): (Config, parseCfg.Config) =
     enableRss: cfg.get("Config", "enableRSS", true),
     enableDebug: cfg.get("Config", "enableDebug", false),
     proxy: cfg.get("Config", "proxy", ""),
-    proxyAuth: cfg.get("Config", "proxyAuth", "")
+    proxyAuth: cfg.get("Config", "proxyAuth", ""),
+    cookieHeader: cfg.get("Config", "cookieHeader", ""),
+    xCsrfToken: cfg.get("Config", "xCsrfToken", "")
   )
 
   return (conf, cfg)
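The two new options are read from the [Config] section via the cfg.get calls above. Assuming the usual nitter.conf layout (the file name and surrounding keys are not part of this diff), enabling them would look something like:

    [Config]
    cookieHeader = "<cookie header copied from a logged-in session>"
    xCsrfToken = "<matching CSRF token>"

Both default to empty strings, in which case fetchImpl adds no extra headers.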
src/consts.nim

@@ -2,6 +2,8 @@
 import uri, sequtils, strutils
 
 const
+  auth* = "Bearer AAAAAAAAAAAAAAAAAAAAAFQODgEAAAAAVHTp76lzh3rFzcHbmHVvQxYYpTw%3DckAlMINMjmCwxUcaXbAN4XqJVdgMJaHqNOFgPMK0zN1qLqLQCF"
+
   consumerKey* = "3nVuSoBZnx6U4vzUxf5w"
   consumerSecret* = "Bcs59EFbbsdF6Sl9Ng71smgStWEGwXXKSjYvPVt7qys"
src/nitter.nim

@@ -7,7 +7,7 @@ from os import getEnv
 
 import jester
 
-import types, config, prefs, formatters, redis_cache, http_pool, auth
+import types, config, prefs, formatters, redis_cache, http_pool
 import views/[general, about]
 import routes/[
   preferences, timeline, status, media, search, rss, list, debug,
@@ -16,10 +16,9 @@ import routes/[
 const instancesUrl = "https://github.com/zedeus/nitter/wiki/Instances"
 const issuesUrl = "https://github.com/zedeus/nitter/issues"
 
-let
-  accountsPath = getEnv("NITTER_ACCOUNTS_FILE", "./guest_accounts.json")
+#let accountsPath = getEnv("NITTER_ACCOUNTS_FILE", "./guest_accounts.json")
 
-initAccountPool(cfg, accountsPath)
+# initAccountPool(cfg, accountsPath)
 
 if not cfg.enableDebug:
   # Silence Jester's query warning
@@ -51,7 +50,7 @@ createSearchRouter(cfg)
 createMediaRouter(cfg)
 createEmbedRouter(cfg)
 createRssRouter(cfg)
-createDebugRouter(cfg)
+#createDebugRouter(cfg)
 
 settings:
   port = Port(cfg.port)
@@ -103,5 +102,5 @@ routes:
   extend preferences, ""
   extend resolver, ""
   extend embed, ""
-  extend debug, ""
+  #extend debug, ""
   extend unsupported, ""
src/routes/debug.nim

@@ -1,13 +1,17 @@
 # SPDX-License-Identifier: AGPL-3.0-only
 import jester
 import router_utils
-import ".."/[auth, types]
+import ".."/[tokens, types]
 
 proc createDebugRouter*(cfg: Config) =
   router debug:
-    get "/.health":
-      respJson getAccountPoolHealth()
-
-    get "/.accounts":
+    get "/.tokens":
       cond cfg.enableDebug
-      respJson getAccountPoolDebug()
+      respJson getPoolJson()
+
+    #get "/.health":
+      #respJson getAccountPoolHealth()
+
+    #get "/.accounts":
+      #cond cfg.enableDebug
+      #respJson getAccountPoolDebug()
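Note: the debug router now exposes the token pool via getPoolJson() at /.tokens (gated on cfg.enableDebug), while the account-pool routes are left commented out. Since nitter.nim in this same compare also comments out createDebugRouter(cfg), the route is only reachable if that call is restored. Per getPoolJson in the new src/tokens.nim below, the response has roughly this shape (values are placeholders):

    {"amount": <pool size>, "requests": <total requests>, "pending": <in-flight requests>, "apis": {...}, "tokens": {...}}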
src/tokens.nim (new file, +168)
@@ -0,0 +1,168 @@
+# SPDX-License-Identifier: AGPL-3.0-only
+import asyncdispatch, httpclient, times, sequtils, json, random
+import strutils, tables
+import types, consts
+
+const
+  maxConcurrentReqs = 5  # max requests at a time per token, to avoid race conditions
+  maxLastUse = 1.hours   # if a token is unused for 60 minutes, it expires
+  maxAge = 2.hours + 55.minutes  # tokens expire after 3 hours
+  failDelay = initDuration(minutes=30)
+
+var
+  tokenPool: seq[Token]
+  lastFailed: Time
+  enableLogging = false
+
+let headers = newHttpHeaders({"authorization": auth})
+
+template log(str) =
+  if enableLogging: echo "[tokens] ", str
+
+proc getPoolJson*(): JsonNode =
+  var
+    list = newJObject()
+    totalReqs = 0
+    totalPending = 0
+    reqsPerApi: Table[string, int]
+
+  for token in tokenPool:
+    totalPending.inc(token.pending)
+    list[token.tok] = %*{
+      "apis": newJObject(),
+      "pending": token.pending,
+      "init": $token.init,
+      "lastUse": $token.lastUse
+    }
+
+    for api in token.apis.keys:
+      list[token.tok]["apis"][$api] = %token.apis[api]
+
+      let
+        maxReqs =
+          case api
+          of Api.photoRail: 180
+          #of Api.timeline: 187
+          #of Api.userTweets, Api.userTimeline: 300
+          of Api.userTweets: 300
+          of Api.listMembers, Api.listBySlug, Api.list, Api.listTweets,
+             Api.userTweetsAndReplies, Api.userMedia,
+             Api.userRestId, Api.userScreenName, Api.tweetDetail,
+             Api.tweetResult, Api.search, Api.favorites,
+             Api.retweeters, Api.favoriters, Api.following, Api.followers: 500
+          #of Api.userSearch: 900
+        reqs = maxReqs - token.apis[api].remaining
+
+      reqsPerApi[$api] = reqsPerApi.getOrDefault($api, 0) + reqs
+      totalReqs.inc(reqs)
+
+  return %*{
+    "amount": tokenPool.len,
+    "requests": totalReqs,
+    "pending": totalPending,
+    "apis": reqsPerApi,
+    "tokens": list
+  }
+
+proc rateLimitError*(): ref RateLimitError =
+  newException(RateLimitError, "rate limited")
+
+proc fetchToken(): Future[Token] {.async.} =
+  if getTime() - lastFailed < failDelay:
+    raise rateLimitError()
+
+  let client = newAsyncHttpClient(headers=headers)
+
+  try:
+    let
+      resp = await client.postContent(activate)
+      tokNode = parseJson(resp)["guest_token"]
+      tok = tokNode.getStr($(tokNode.getInt))
+      time = getTime()
+
+    return Token(tok: tok, init: time, lastUse: time)
+  except Exception as e:
+    echo "[tokens] fetching token failed: ", e.msg
+    if "Try again" notin e.msg:
+      echo "[tokens] fetching tokens paused, resuming in 30 minutes"
+      lastFailed = getTime()
+  finally:
+    client.close()
+
+proc expired(token: Token): bool =
+  let time = getTime()
+  token.init < time - maxAge or token.lastUse < time - maxLastUse
+
+proc isLimited(token: Token; api: Api): bool =
+  if token.isNil or token.expired:
+    return true
+
+  if api in token.apis:
+    let limit = token.apis[api]
+    return (limit.remaining <= 10 and limit.reset > epochTime().int)
+  else:
+    return false
+
+proc isReady(token: Token; api: Api): bool =
+  not (token.isNil or token.pending > maxConcurrentReqs or token.isLimited(api))
+
+proc release*(token: Token; used=false; invalid=false) =
+  if token.isNil: return
+  if invalid or token.expired:
+    if invalid: log "discarding invalid token"
+    elif token.expired: log "discarding expired token"
+
+    let idx = tokenPool.find(token)
+    if idx > -1: tokenPool.delete(idx)
+  elif used:
+    dec token.pending
+    token.lastUse = getTime()
+
+proc getToken*(api: Api): Future[Token] {.async.} =
+  for i in 0 ..< tokenPool.len:
+    if result.isReady(api): break
+    release(result)
+    result = tokenPool.sample()
+
+  if not result.isReady(api):
+    release(result)
+    result = await fetchToken()
+    log "added new token to pool"
+    tokenPool.add result
+
+  if not result.isNil:
+    inc result.pending
+  else:
+    raise rateLimitError()
+
+proc setRateLimit*(token: Token; api: Api; remaining, reset: int) =
+  # avoid undefined behavior in race conditions
+  if api in token.apis:
+    let limit = token.apis[api]
+    if limit.reset >= reset and limit.remaining < remaining:
+      return
+
+  token.apis[api] = RateLimit(remaining: remaining, reset: reset)
+
+proc poolTokens*(amount: int) {.async.} =
+  var futs: seq[Future[Token]]
+  for i in 0 ..< amount:
+    futs.add fetchToken()
+
+  for token in futs:
+    var newToken: Token
+
+    try: newToken = await token
+    except: discard
+
+    if not newToken.isNil:
+      log "added new token to pool"
+      tokenPool.add newToken
+
+proc initTokenPool*(cfg: Config) {.async.} =
+  enableLogging = cfg.enableDebug
+
+  while true:
+    if tokenPool.countIt(not it.isLimited(Api.userTweets)) < cfg.minTokens:
+      await poolTokens(min(4, cfg.minTokens - tokenPool.len))
+    await sleepAsync(2000)
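For orientation, the lifecycle this pool implies (and which fetchImpl in api.nim above follows) is: getToken before a request, setRateLimit from the rate-limit response headers, then release. A minimal sketch of a hypothetical caller, not part of this change; the proc name and the numeric values are made up:

import asyncdispatch
import types, tokens

proc exampleRequest(api: Api) {.async.} =
  # acquire a guest token; may fetch a fresh one if the pool is empty or limited
  let token = await getToken(api)
  try:
    # ... perform the HTTP request using genHeaders(token) from api.nim ...
    # then record what the rate-limit headers reported for this endpoint
    token.setRateLimit(api, remaining = 180, reset = 0)  # placeholder values
  finally:
    # mark the token as used so its lastUse timestamp is refreshed;
    # release(token, invalid=true) would evict it from the pool instead
    release(token, used = true)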
src/types.nim

@@ -38,6 +38,13 @@
     limited*: bool
     limitedAt*: int
 
+  Token* = ref object
+    tok*: string
+    init*: Time
+    lastUse*: Time
+    pending*: int
+    apis*: Table[Api, RateLimit]
+
   GuestAccount* = ref object
     id*: int64
     oauthToken*: string
@@ -274,6 +281,8 @@
     enableDebug*: bool
     proxy*: string
     proxyAuth*: string
+    cookieHeader*: string
+    xCsrfToken*: string
 
     rssCacheTime*: int
     listCacheTime*: int
src/views/general.nim

@@ -44,7 +44,7 @@ proc renderHead*(prefs: Prefs; cfg: Config; req: Request; titleText=""; desc="";
     theme = req.params["theme"].toTheme
 
   let ogType =
-    if video.len > 0: "video"
+    if video.len > 0: "video.other"
     elif rss.len > 0: "object"
     elif images.len > 0: "photo"
     else: "article"
@@ -107,17 +107,19 @@ proc renderHead*(prefs: Prefs; cfg: Config; req: Request; titleText=""; desc="";
 
       let image = getUrlPrefix(cfg) & getPicUrl(url)
       meta(property="og:image", content=image)
-      meta(property="twitter:image:src", content=image)
+      if video.len == 0:
+        meta(property="twitter:image:src", content=image)
 
       if rss.len > 0:
         meta(property="twitter:card", content="summary")
-      else:
+      elif video.len == 0:
         meta(property="twitter:card", content="summary_large_image")
 
     if video.len > 0:
-      meta(property="og:video:url", content=video)
-      meta(property="og:video:secure_url", content=video)
-      meta(property="og:video:type", content="text/html")
+      let videoUrl = getUrlPrefix(cfg) & video
+      meta(property="og:video:url", content=videoUrl)
+      meta(property="og:video:secure_url", content=videoUrl)
+      meta(property="og:video:type", content="video/mp4")
 
     # this is last so images are also preloaded
     # if this is done earlier, Chrome only preloads one image for some reason
@@ -1,36 +0,0 @@
-#!/bin/bash
-# Grab oauth token for use with Nitter (requires Twitter account).
-# results: {"oauth_token":"xxxxxxxxxx-xxxxxxxxx","oauth_token_secret":"xxxxxxxxxxxxxxxxxxxxx"}
-
-username=""
-password=""
-
-if [[ -z "$username" || -z "$password" ]]; then
-    echo "needs username and password"
-    exit 1
-fi
-
-bearer_token='AAAAAAAAAAAAAAAAAAAAAFXzAwAAAAAAMHCxpeSDG1gLNLghVe8d74hl6k4%3DRUMF4xAQLsbeBhTSRrCiQpJtxoGWeyHrDb5te2jpGskWDFW82F'
-guest_token=$(curl -s -XPOST https://api.twitter.com/1.1/guest/activate.json -H "Authorization: Bearer ${bearer_token}" | jq -r '.guest_token')
-base_url='https://api.twitter.com/1.1/onboarding/task.json'
-header=(-H "Authorization: Bearer ${bearer_token}" -H "User-Agent: TwitterAndroid/10.21.1" -H "Content-Type: application/json" -H "X-Guest-Token: ${guest_token}")
-
-# start flow
-flow_1=$(curl -si -XPOST "${base_url}?flow_name=login" "${header[@]}")
-
-# get 'att', now needed in headers, and 'flow_token' from flow_1
-att=$(sed -En 's/^att: (.*)\r/\1/p' <<< "${flow_1}")
-flow_token=$(sed -n '$p' <<< "${flow_1}" | jq -r .flow_token)
-
-# username
-token_2=$(curl -s -XPOST "${base_url}" -H "att: ${att}" "${header[@]}" \
-  -d '{"flow_token":"'"${flow_token}"'","subtask_inputs":[{"subtask_id":"LoginEnterUserIdentifierSSO","settings_list":{"setting_responses":[{"key":"user_identifier","response_data":{"text_data":{"result":"'"${username}"'"}}}],"link":"next_link"}}]}' | jq -r .flow_token)
-
-# password
-token_3=$(curl -s -XPOST "${base_url}" -H "att: ${att}" "${header[@]}" \
-  -d '{"flow_token":"'"${token_2}"'","subtask_inputs":[{"enter_password":{"password":"'"${password}"'","link":"next_link"},"subtask_id":"LoginEnterPassword"}]}' | jq -r .flow_token)
-
-# finally print oauth_token and secret
-curl -s -XPOST "${base_url}" -H "att: ${att}" "${header[@]}" \
-  -d '{"flow_token":"'"${token_3}"'","subtask_inputs":[{"check_logged_in_account":{"link":"AccountDuplicationCheck_false"},"subtask_id":"AccountDuplicationCheck"}]}' | \
-  jq -c '.subtasks[0]|if(.open_account) then {oauth_token: .open_account.oauth_token, oauth_token_secret: .open_account.oauth_token_secret} else empty end'