GraphQL timeline (#812)
* Update deps

* Replace profile timeline with GraphQL endpoint

* Update GraphQL endpoint versions

* Use GraphQL for profile media tab

* Fix UserByRestId request

* Improve routing, fixes #814

* Fix token pool JSON

* Deduplicate GraphQL timeline endpoints

* Update list endpoints

* Use GraphQL for list tweets

* Remove debug leftover

* Replace old pinned tweet endpoint with GraphQL

* Validate tweet ID

* Minor token handling fix

* Hide US-only commerce cards

* Update config example

* Remove http pool and gzip from token pool

* Support tombstoned tweets in threads

* Retry GraphQL timeout errors

* Remove unnecessary 401 retry

* Remove broken timeout retry

* Update karax, use new bool attribute feature

* Update card test

* Fix odd edge case with broken retweets

* Replace search endpoints, switch Bearer token

* Only parse user search if it's a list

* Fix quoted tweet crash

* Fix empty search query handling

* Fix invalid user search errors again
zedeus committed Apr 21, 2023
commit 1ac389e (1 parent: e2560dc)
Showing 29 changed files with 405 additions and 301 deletions.

nitter.example.conf (18 changes: 9 additions & 9 deletions)
@@ -1,37 +1,37 @@
 [Server]
+hostname = "nitter.net"  # for generating links, change this to your own domain/ip
+title = "nitter"
 address = "0.0.0.0"
 port = 8080
 https = false  # disable to enable cookies when not using https
 httpMaxConnections = 100
 staticDir = "./public"
-title = "nitter"
-hostname = "nitter.net"
 
 [Cache]
 listMinutes = 240  # how long to cache list info (not the tweets, so keep it high)
 rssMinutes = 10  # how long to cache rss queries
 redisHost = "localhost"  # Change to "nitter-redis" if using docker-compose
 redisPort = 6379
 redisPassword = ""
-redisConnections = 20  # connection pool size
+redisConnections = 20  # minimum open connections in pool
 redisMaxConnections = 30
-# max, new connections are opened when none are available, but if the pool size
+# new connections are opened when none are available, but if the pool size
 # goes above this, they're closed when released. don't worry about this unless
 # you receive tons of requests per second
 
 [Config]
 hmacKey = "secretkey"  # random key for cryptographic signing of video urls
 base64Media = false  # use base64 encoding for proxied media urls
 enableRSS = true  # set this to false to disable RSS feeds
-enableDebug = false  # enable request logs and debug endpoints
+enableDebug = false  # enable request logs and debug endpoints (/.tokens)
 proxy = ""  # http/https url, SOCKS proxies are not supported
 proxyAuth = ""
 tokenCount = 10
 # minimum amount of usable tokens. tokens are used to authorize API requests,
-# but they expire after ~1 hour, and have a limit of 187 requests.
-# the limit gets reset every 15 minutes, and the pool is filled up so there's
-# always at least $tokenCount usable tokens. again, only increase this if
-# you receive major bursts all the time
+# but they expire after ~1 hour, and have a limit of 500 requests per endpoint.
+# the limits reset every 15 minutes, and the pool is filled up so there's
+# always at least `tokenCount` usable tokens. only increase this if you receive
+# major bursts all the time and don't have a rate limiting setup via e.g. nginx
 
 # Change default preferences here, see src/prefs_impl.nim for a complete list
 [Preferences]
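
The redis comments above describe a soft-capped pool: keep `redisConnections` connections around, open extras on demand, and close released connections while more than `redisMaxConnections` are open. A minimal Nim sketch of that policy (illustrative names only, not Nitter's actual redis pool implementation):

# Sketch of the soft-capped pool policy described in the config comments.
# Illustrative only; Connection is a stand-in for a real redis connection.
type
  Connection = object
  Pool = object
    idle: seq[Connection]
    open: int       # total connections currently open
    maxConns: int   # corresponds to redisMaxConnections

proc acquire(p: var Pool): Connection =
  if p.idle.len > 0:
    return p.idle.pop()
  inc p.open        # nothing idle: open a new connection on demand
  result = Connection()

proc release(p: var Pool; c: Connection) =
  if p.open > p.maxConns:
    dec p.open      # over the soft cap: close instead of pooling
  else:
    p.idle.add c

Under this policy a burst opens extra connections, and the pool shrinks back toward the baseline as they are released.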

nitter.nimble (2 changes: 1 addition & 1 deletion)
@@ -12,7 +12,7 @@ bin = @["nitter"]
 
 requires "nim >= 1.4.8"
 requires "jester#baca3f"
-requires "karax#9ee695b"
+requires "karax#5cf360c"
 requires "sass#7dfdd03"
 requires "nimcrypto#4014ef9"
 requires "markdown#158efe3"

src/api.nim (156 changes: 87 additions & 69 deletions)
@@ -7,106 +7,82 @@ import experimental/parser as newParser
 proc getGraphUser*(username: string): Future[User] {.async.} =
   if username.len == 0: return
   let
-    variables = """{
-      "screen_name": "$1",
-      "withSafetyModeUserFields": false,
-      "withSuperFollowsUserFields": false
-    }""" % [username]
-    js = await fetchRaw(graphUser ? {"variables": variables}, Api.userScreenName)
+    variables = %*{"screen_name": username}
+    params = {"variables": $variables, "features": gqlFeatures}
+    js = await fetchRaw(graphUser ? params, Api.userScreenName)
   result = parseGraphUser(js)
 
 proc getGraphUserById*(id: string): Future[User] {.async.} =
   if id.len == 0 or id.any(c => not c.isDigit): return
   let
-    variables = """{"userId": "$1", "withSuperFollowsUserFields": true}""" % [id]
-    js = await fetchRaw(graphUserById ? {"variables": variables}, Api.userRestId)
+    variables = %*{"userId": id}
+    params = {"variables": $variables, "features": gqlFeatures}
+    js = await fetchRaw(graphUserById ? params, Api.userRestId)
   result = parseGraphUser(js)
 
+proc getGraphUserTweets*(id: string; kind: TimelineKind; after=""): Future[Timeline] {.async.} =
+  if id.len == 0: return
+  let
+    cursor = if after.len > 0: "\"cursor\":\"$1\"," % after else: ""
+    variables = userTweetsVariables % [id, cursor]
+    params = {"variables": variables, "features": gqlFeatures}
+    (url, apiId) = case kind
+                   of TimelineKind.tweets: (graphUserTweets, Api.userTweets)
+                   of TimelineKind.replies: (graphUserTweetsAndReplies, Api.userTweetsAndReplies)
+                   of TimelineKind.media: (graphUserMedia, Api.userMedia)
+    js = await fetch(url ? params, apiId)
+  result = parseGraphTimeline(js, "user", after)
+
+proc getGraphListTweets*(id: string; after=""): Future[Timeline] {.async.} =
+  if id.len == 0: return
+  let
+    cursor = if after.len > 0: "\"cursor\":\"$1\"," % after else: ""
+    variables = listTweetsVariables % [id, cursor]
+    params = {"variables": variables, "features": gqlFeatures}
+    js = await fetch(graphListTweets ? params, Api.listTweets)
+  result = parseGraphTimeline(js, "list", after)
+
 proc getGraphListBySlug*(name, list: string): Future[List] {.async.} =
   let
-    variables = %*{"screenName": name, "listSlug": list, "withHighlightedLabel": false}
-    url = graphListBySlug ? {"variables": $variables}
-  result = parseGraphList(await fetch(url, Api.listBySlug))
+    variables = %*{"screenName": name, "listSlug": list}
+    params = {"variables": $variables, "features": gqlFeatures}
+  result = parseGraphList(await fetch(graphListBySlug ? params, Api.listBySlug))
 
 proc getGraphList*(id: string): Future[List] {.async.} =
   let
-    variables = %*{"listId": id, "withHighlightedLabel": false}
-    url = graphList ? {"variables": $variables}
-  result = parseGraphList(await fetch(url, Api.list))
+    variables = %*{"listId": id}
+    params = {"variables": $variables, "features": gqlFeatures}
+  result = parseGraphList(await fetch(graphListById ? params, Api.list))
 
 proc getGraphListMembers*(list: List; after=""): Future[Result[User]] {.async.} =
   if list.id.len == 0: return
   var
     variables = %*{
       "listId": list.id,
-      "withSuperFollowsUserFields": false,
       "withBirdwatchPivots": false,
       "withDownvotePerspective": false,
       "withReactionsMetadata": false,
-      "withReactionsPerspective": false,
-      "withSuperFollowsTweetFields": false
+      "withReactionsPerspective": false
     }
   if after.len > 0:
     variables["cursor"] = % after
-  let url = graphListMembers ? {"variables": $variables}
+  let url = graphListMembers ? {"variables": $variables, "features": gqlFeatures}
   result = parseGraphListMembers(await fetchRaw(url, Api.listMembers), after)
 
-proc getListTimeline*(id: string; after=""): Future[Timeline] {.async.} =
+proc getGraphTweetResult*(id: string): Future[Tweet] {.async.} =
   if id.len == 0: return
   let
-    ps = genParams({"list_id": id, "ranking_mode": "reverse_chronological"}, after)
-    url = listTimeline ? ps
-  result = parseTimeline(await fetch(url, Api.timeline), after)
-
-proc getTimeline*(id: string; after=""; replies=false): Future[Timeline] {.async.} =
-  if id.len == 0: return
-  let
-    ps = genParams({"userId": id, "include_tweet_replies": $replies}, after)
-    url = timeline / (id & ".json") ? ps
-  result = parseTimeline(await fetch(url, Api.timeline), after)
-
-proc getMediaTimeline*(id: string; after=""): Future[Timeline] {.async.} =
-  if id.len == 0: return
-  let url = mediaTimeline / (id & ".json") ? genParams(cursor=after)
-  result = parseTimeline(await fetch(url, Api.timeline), after)
-
-proc getPhotoRail*(name: string): Future[PhotoRail] {.async.} =
-  if name.len == 0: return
-  let
-    ps = genParams({"screen_name": name, "trim_user": "true"},
-                   count="18", ext=false)
-    url = photoRail ? ps
-  result = parsePhotoRail(await fetch(url, Api.timeline))
-
-proc getSearch*[T](query: Query; after=""): Future[Result[T]] {.async.} =
-  when T is User:
-    const
-      searchMode = ("result_filter", "user")
-      parse = parseUsers
-      fetchFunc = fetchRaw
-  else:
-    const
-      searchMode = ("tweet_search_mode", "live")
-      parse = parseTimeline
-      fetchFunc = fetch
-
-  let q = genQueryParam(query)
-  if q.len == 0 or q == emptyQuery:
-    return Result[T](beginning: true, query: query)
-
-  let url = search ? genParams(searchParams & @[("q", q), searchMode], after)
-  try:
-    result = parse(await fetchFunc(url, Api.search), after)
-    result.query = query
-  except InternalError:
-    return Result[T](beginning: true, query: query)
+    variables = tweetResultVariables % id
+    params = {"variables": variables, "features": gqlFeatures}
+    js = await fetch(graphTweetResult ? params, Api.tweetResult)
+  result = parseGraphTweetResult(js)
 
 proc getGraphTweet(id: string; after=""): Future[Conversation] {.async.} =
   if id.len == 0: return
   let
     cursor = if after.len > 0: "\"cursor\":\"$1\"," % after else: ""
     variables = tweetVariables % [id, cursor]
-    params = {"variables": variables, "features": tweetFeatures}
+    params = {"variables": variables, "features": gqlFeatures}
     js = await fetch(graphTweet ? params, Api.tweetDetail)
   result = parseGraphConversation(js, id)

@@ -119,9 +95,51 @@ proc getTweet*(id: string; after=""): Future[Conversation] {.async.} =
   if after.len > 0:
     result.replies = await getReplies(id, after)
 
-proc getStatus*(id: string): Future[Tweet] {.async.} =
-  let url = status / (id & ".json") ? genParams()
-  result = parseStatus(await fetch(url, Api.status))
+proc getGraphSearch*(query: Query; after=""): Future[Result[Tweet]] {.async.} =
+  let q = genQueryParam(query)
+  if q.len == 0 or q == emptyQuery:
+    return Result[Tweet](query: query, beginning: true)
+
+  var
+    variables = %*{
+      "rawQuery": q,
+      "count": 20,
+      "product": "Latest",
+      "withDownvotePerspective": false,
+      "withReactionsMetadata": false,
+      "withReactionsPerspective": false
+    }
+  if after.len > 0:
+    variables["cursor"] = % after
+  let url = graphSearchTimeline ? {"variables": $variables, "features": gqlFeatures}
+  result = parseGraphSearch(await fetch(url, Api.search), after)
+  result.query = query
+
+proc getUserSearch*(query: Query; page="1"): Future[Result[User]] {.async.} =
+  if query.text.len == 0:
+    return Result[User](query: query, beginning: true)
+
+  var url = userSearch ? {
+    "q": query.text,
+    "skip_status": "1",
+    "count": "20",
+    "page": page
+  }
+
+  result = parseUsers(await fetchRaw(url, Api.userSearch))
+  result.query = query
+  if page.len == 0:
+    result.bottom = "2"
+  elif page.allCharsInSet(Digits):
+    result.bottom = $(parseInt(page) + 1)
+
+proc getPhotoRail*(name: string): Future[PhotoRail] {.async.} =
+  if name.len == 0: return
+  let
+    ps = genParams({"screen_name": name, "trim_user": "true"},
+                   count="18", ext=false)
+    url = photoRail ? ps
+  result = parsePhotoRail(await fetch(url, Api.timeline))
 
 proc resolve*(url: string; prefs: Prefs): Future[string] {.async.} =
   let client = newAsyncHttpClient(maxRedirects=0)
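
Every rewritten endpoint above shares one request shape: a JSON `variables` object serialized into the query string next to the shared `gqlFeatures` feature-flag blob. A self-contained sketch of that URL construction (the query ID in the URL and the features JSON are placeholders; the real graphUser/gqlFeatures constants live in src/consts.nim):

# Sketch of the shared GraphQL request shape used by the endpoints above.
# Placeholder endpoint and features string; real values are in src/consts.nim.
import std/[json, uri]

let
  graphUser = parseUri("https://api.twitter.com/graphql/QUERY_ID/UserByScreenName")
  gqlFeatures = """{"responsive_web_graphql_timeline_navigation_enabled":false}"""

# Nitter combines a base Uri with query parameters via a `?` helper;
# a minimal version of that operator looks like this.
proc `?`(u: Uri; params: openArray[(string, string)]): Uri =
  result = u
  result.query = encodeQuery(params)

let
  variables = %*{"screen_name": "nim_lang"}
  url = graphUser ? {"variables": $variables, "features": gqlFeatures}
echo url  # .../UserByScreenName?variables=%7B...%7D&features=%7B...%7D

Because the feature flags ride along as a second, shared parameter, each endpoint's `variables` JSON shrinks to just the identifying fields, which is what the deletions above reflect.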

src/apiutils.nim (20 changes: 9 additions & 11 deletions)
@@ -17,8 +17,8 @@ proc genParams*(pars: openArray[(string, string)] = @[]; cursor="";
     result &= p
   if ext:
     result &= ("ext", "mediaStats")
-    result &= ("include_ext_alt_text", "true")
-    result &= ("include_ext_media_availability", "true")
+    result &= ("include_ext_alt_text", "1")
+    result &= ("include_ext_media_availability", "1")
   if count.len > 0:
     result &= ("count", count)
   if cursor.len > 0:
@@ -44,7 +44,7 @@ proc genHeaders*(token: Token = nil): HttpHeaders =
   })
 
 template updateToken() =
-  if api != Api.search and resp.headers.hasKey(rlRemaining):
+  if resp.headers.hasKey(rlRemaining):
     let
       remaining = parseInt(resp.headers[rlRemaining])
       reset = parseInt(resp.headers[rlReset])
@@ -67,14 +67,9 @@ template fetchImpl(result, fetchBody) {.dirty.} =
 
   getContent()
 
-  # Twitter randomly returns 401 errors with an empty body quite often.
-  # Retrying the request usually works.
-  if resp.status == "401 Unauthorized" and result.len == 0:
-    getContent()
-
-  if resp.status == $Http503:
-    badClient = true
-    raise newException(InternalError, result)
+  if resp.status == $Http503:
+    badClient = true
+    raise newException(BadClientError, "Bad client")
 
   if result.len > 0:
     if resp.headers.getOrDefault("content-encoding") == "gzip":
@@ -90,6 +85,9 @@ template fetchImpl(result, fetchBody) {.dirty.} =
       raise newException(InternalError, $url)
   except InternalError as e:
     raise e
+  except BadClientError as e:
+    release(token, used=true)
+    raise e
   except Exception as e:
     echo "error: ", e.name, ", msg: ", e.msg, ", token: ", token[], ", url: ", url
     if "length" notin e.msg and "descriptor" notin e.msg:
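
With the `api != Api.search` guard removed, updateToken now records rate-limit headers for every endpoint, matching the "500 requests per endpoint" note in the config. A rough sketch of that bookkeeping (the header names match Nitter's rlRemaining/rlReset constants; the type and proc shapes here are illustrative, not Nitter's exact internals):

# Sketch of per-endpoint rate-limit bookkeeping, as updateToken does above.
# Illustrative types; the real logic lives in src/apiutils.nim and src/tokens.nim.
import std/[strutils, tables, times]

const
  rlRemaining = "x-rate-limit-remaining"
  rlReset = "x-rate-limit-reset"

type
  Api = enum userTweets, tweetDetail, search
  RateLimit = object
    remaining: int
    reset: Time   # unix time at which the 15-minute window resets

var limits: Table[Api, RateLimit]

proc updateToken(api: Api; headers: Table[string, string]) =
  # every response carries the endpoint's remaining quota and reset time
  if rlRemaining in headers:
    limits[api] = RateLimit(
      remaining: parseInt(headers[rlRemaining]),
      reset: fromUnix(parseInt(headers[rlReset]).int64))

The new `except BadClientError` branch above pairs with this: releasing the token as used keeps the pool's accounting consistent even when Twitter answers with a 503.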
(remaining 25 changed files not shown)
