Remove failed guest ID experiment
parent b03faccd45
commit b87cd39bce
7 changed files with 9 additions and 28 deletions
@@ -1,16 +0,0 @@
-import httpclient, strutils
-
-proc getGuestId*(): string =
-  let client = newHttpClient()
-  for i in 0 .. 10:
-    try:
-      let req = client.get("https://twitter.com")
-      if "react-root" in req.body: continue
-      for k, v in req.headers:
-        if "guest_id" in v:
-          return v[v.find("=") + 1 .. v.find(";")]
-    except:
-      discard
-    finally:
-      try: client.close()
-      except: discard
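For reference, the deleted helper retried the request against https://twitter.com up to ten times and pulled the guest_id value out of a response header. Below is a minimal standalone sketch of just that parsing step; the header string is a made-up example, extractGuestId is a name invented for this sketch, and the slice uses an exclusive upper bound rather than reproducing the deleted line's inclusive ".." exactly.

import strutils

proc extractGuestId(header: string): string =
  # take everything between the first "=" and the first ";"
  let start = header.find("=") + 1
  let stop = header.find(";")
  if start <= 0 or stop < start: return ""
  result = header[start ..< stop]

echo extractGuestId("guest_id=v1%3A12345; Max-Age=63072000; Path=/")
# prints: v1%3A12345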
@@ -54,7 +54,7 @@ proc getListMembers*(username, list, after, agent: string): Future[Result[Profil

   let
     url = base / (listMembersUrl % [username, list])
-    html = await fetchHtml(url, genHeaders(agent, url, guestId=true))
+    html = await fetchHtml(url, genHeaders(agent, url))

   result = Result[Profile](
     minId: html.selectAttr(".stream-container", "data-min-position"),
@@ -34,7 +34,7 @@ proc getProfile*(username, agent: string): Future[Profile] {.async.} =
 proc getProfileFull*(username, agent: string): Future[Profile] {.async.} =
   let
     url = base / username
-    headers = genHeaders(agent, url, auth=true, guestId=true)
+    headers = genHeaders(agent, url, auth=true)
     html = await fetchHtml(url, headers)

   if html == nil: return
@@ -23,7 +23,7 @@ proc getSearch*[T](query: Query; after, agent: string;
     encoded = encodeUrl(param, usePlus=false)

     referer = base / ("search?f=$1&q=$2&src=typd" % [kind, encoded])
-    headers = genHeaders(agent, referer, auth=true, xml=true, guestId=true)
+    headers = genHeaders(agent, referer, auth=true, xml=true)

     params = {
       "f": kind,
@@ -30,7 +30,7 @@ proc getProfileAndTimeline*(username, after, agent: string;
     url = url ? {"max_position": after}

   let
-    headers = genHeaders(agent, base / username, auth=true, guestId=true)
+    headers = genHeaders(agent, base / username, auth=true)
     html = await fetchHtml(url, headers)
     timeline = parseTimeline(html.select("#timeline > .stream-container"), after)
     profile = parseTimelineProfile(html)
@@ -8,7 +8,7 @@ proc getTweet*(username, id, after, agent: string): Future[Conversation] {.async
     headers = genHeaders({
       "pragma": "no-cache",
       "x-previous-page-name": "profile"
-    }, agent, base, xml=true, guestId=true)
+    }, agent, base, xml=true)

     url = base / username / tweetUrl / id ? {"max_position": after}
     html = await fetchHtml(url, headers)
@@ -2,13 +2,11 @@ import httpclient, asyncdispatch, htmlparser, options
 import strutils, json, xmltree, uri

 import ../types
-import consts, cookie
-
-var guestIdCookie = "guest_id=" & getGuestId()
+import consts

 proc genHeaders*(headers: openArray[tuple[key: string, val: string]];
                  agent: string; referer: Uri; lang=true;
-                 auth=false; xml=false; guestId=false): HttpHeaders =
+                 auth=false; xml=false): HttpHeaders =
   result = newHttpHeaders({
     "referer": $referer,
     "user-agent": agent,
@@ -18,14 +16,13 @@ proc genHeaders*(headers: openArray[tuple[key: string, val: string]];
   if auth: result["authority"] = "twitter.com"
   if lang: result["accept-language"] = consts.lang
   if xml: result["x-requested-with"] = "XMLHttpRequest"
-  # if guestId: result["cookie"] = guestIdCookie

   for (key, val) in headers:
     result[key] = val

 proc genHeaders*(agent: string; referer: Uri; lang=true;
-                 auth=false; xml=false; guestId=false): HttpHeaders =
-  genHeaders([], agent, referer, lang, auth, xml, guestId)
+                 auth=false; xml=false): HttpHeaders =
+  genHeaders([], agent, referer, lang, auth, xml)

 template newClient*() {.dirty.} =
   var client = newAsyncHttpClient()
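After this change genHeaders only sets plain request headers and never attaches a guest_id cookie. A rough standalone sketch of what the remaining logic amounts to is below, built directly on httpcore's newHttpHeaders; the agent string and referer are placeholder values, and "en-US,en;q=0.9" stands in for consts.lang, which is not shown in this diff.

import httpcore, uri

let
  agent = "Mozilla/5.0"                       # placeholder user agent
  referer = parseUri("https://twitter.com/")  # placeholder referer

let headers = newHttpHeaders({
  "referer": $referer,
  "user-agent": agent
})
# the boolean flags now only toggle these extra fields; no cookie header anymore
headers["authority"] = "twitter.com"            # auth=true
headers["accept-language"] = "en-US,en;q=0.9"   # lang=true (consts.lang stand-in)
headers["x-requested-with"] = "XMLHttpRequest"  # xml=true

for k, v in headers:
  echo k, ": ", v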