GraphQL timeline #812

Merged: 31 commits into master from graphql-timeline (Apr 21, 2023)
Commits (31)
ae03170
Update deps
zedeus Mar 19, 2023
8fc3c3d
Replace profile timeline with GraphQL endpoint
zedeus Mar 21, 2023
061694a
Update GraphQL endpoint versions
zedeus Mar 22, 2023
a5826a3
Use GraphQL for profile media tab
zedeus Mar 22, 2023
482b2da
Fix UserByRestId request
zedeus Mar 22, 2023
91c6d59
Improve routing, fixes #814
zedeus Mar 22, 2023
a496616
Fix token pool JSON
zedeus Mar 22, 2023
1f10c1f
Deduplicate GraphQL timeline endpoints
zedeus Mar 22, 2023
56f1ad4
Update list endpoints
zedeus Mar 22, 2023
4332fae
Use GraphQL for list tweets
zedeus Mar 22, 2023
bed060f
Remove debug leftover
zedeus Mar 23, 2023
5676ecc
Replace old pinned tweet endpoint with GraphQL
zedeus Mar 24, 2023
61d65dc
Validate tweet ID
zedeus Mar 25, 2023
1f9d500
Minor token handling fix
zedeus Mar 25, 2023
ec59942
Hide US-only commerce cards
zedeus Mar 26, 2023
2ed1f63
Update config example
zedeus Mar 27, 2023
b2580ed
Remove http pool and gzip from token pool
zedeus Mar 27, 2023
892356e
Support tombstoned tweets in threads
zedeus Mar 27, 2023
bb6f8a2
Retry GraphQL timeout errors
zedeus Mar 27, 2023
f0e1cb5
Remove unnecessary 401 retry
zedeus Mar 27, 2023
bb221fb
Remove broken timeout retry
zedeus Mar 27, 2023
c8bc02c
Update karax, use new bool attribute feature
zedeus Mar 28, 2023
ef2ecb4
Merge branch 'master' into graphql-timeline
zedeus Mar 28, 2023
9cdd419
Merge branch 'master' into graphql-timeline
zedeus Mar 28, 2023
64741de
Update card test
zedeus Mar 28, 2023
34363a2
Fix odd edgecase with broken retweets
zedeus Apr 1, 2023
5dd85c6
Replace search endpoints, switch Bearer token
zedeus Apr 21, 2023
c8e8ea3
Only parse user search if it's a list
zedeus Apr 21, 2023
10a912d
Fix quoted tweet crash
zedeus Apr 21, 2023
376b444
Fix empty search query handling
zedeus Apr 21, 2023
2f3ee72
Fix invalid user search errors again
zedeus Apr 21, 2023
18 changes: 9 additions & 9 deletions nitter.example.conf
@@ -1,37 +1,37 @@
 [Server]
+hostname = "nitter.net" # for generating links, change this to your own domain/ip
+title = "nitter"
 address = "0.0.0.0"
 port = 8080
 https = false # disable to enable cookies when not using https
 httpMaxConnections = 100
 staticDir = "./public"
-title = "nitter"
-hostname = "nitter.net"

 [Cache]
 listMinutes = 240 # how long to cache list info (not the tweets, so keep it high)
 rssMinutes = 10 # how long to cache rss queries
 redisHost = "localhost" # Change to "nitter-redis" if using docker-compose
 redisPort = 6379
 redisPassword = ""
-redisConnections = 20 # connection pool size
+redisConnections = 20 # minimum open connections in pool
 redisMaxConnections = 30
-# max, new connections are opened when none are available, but if the pool size
+# new connections are opened when none are available, but if the pool size
 # goes above this, they're closed when released. don't worry about this unless
 # you receive tons of requests per second

 [Config]
 hmacKey = "secretkey" # random key for cryptographic signing of video urls
 base64Media = false # use base64 encoding for proxied media urls
 enableRSS = true # set this to false to disable RSS feeds
-enableDebug = false # enable request logs and debug endpoints
+enableDebug = false # enable request logs and debug endpoints (/.tokens)
 proxy = "" # http/https url, SOCKS proxies are not supported
 proxyAuth = ""
 tokenCount = 10
 # minimum amount of usable tokens. tokens are used to authorize API requests,
-# but they expire after ~1 hour, and have a limit of 187 requests.
-# the limit gets reset every 15 minutes, and the pool is filled up so there's
-# always at least $tokenCount usable tokens. again, only increase this if
-# you receive major bursts all the time
+# but they expire after ~1 hour, and have a limit of 500 requests per endpoint.
+# the limits reset every 15 minutes, and the pool is filled up so there's
+# always at least `tokenCount` usable tokens. only increase this if you receive
+# major bursts all the time and don't have a rate limiting setup via e.g. nginx

 # Change default preferences here, see src/prefs_impl.nim for a complete list
 [Preferences]
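The token-pool comments above amount to a small accounting scheme: a token expires about an hour after creation, each endpoint gets 500 requests, and the per-endpoint limit resets every 15 minutes. Below is a minimal sketch of that bookkeeping using only the figures from the comment; the type and proc names are illustrative, not nitter's actual tokens.nim.

```nim
# Illustrative only: models the lifetime rules from the config comments
# (~1 hour token lifetime, 500 requests per endpoint, 15-minute resets).
import std/[times, tables]

type
  Api = enum userTweets, tweetDetail, search  # stand-in endpoint ids
  RateLimit = object
    remaining: int  # requests left in the current window
    reset: Time     # when the 15-minute window ends
  Token = ref object
    init: Time      # creation time, for the ~1 hour expiry
    apis: Table[Api, RateLimit]

proc isUsable(token: Token; api: Api; now: Time): bool =
  ## A token counts toward `tokenCount` if it hasn't expired and the
  ## endpoint either has requests left or its window has already reset.
  if now - token.init > initDuration(hours = 1):
    return false
  if api notin token.apis:
    return true  # endpoint never used with this token
  let limit = token.apis[api]
  limit.remaining > 0 or now > limit.reset

# usage: the pool refills until at least tokenCount tokens pass this check
let t = Token(init: getTime())
echo t.isUsable(userTweets, getTime())  # true: fresh token, nothing used yet
```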
2 changes: 1 addition & 1 deletion nitter.nimble
@@ -12,7 +12,7 @@ bin = @["nitter"]

requires "nim >= 1.4.8"
requires "jester#baca3f"
requires "karax#9ee695b"
requires "karax#5cf360c"
requires "sass#7dfdd03"
requires "nimcrypto#4014ef9"
requires "markdown#158efe3"
156 changes: 87 additions & 69 deletions src/api.nim
@@ -7,106 +7,82 @@ import experimental/parser as newParser
 proc getGraphUser*(username: string): Future[User] {.async.} =
   if username.len == 0: return
   let
-    variables = """{
-      "screen_name": "$1",
-      "withSafetyModeUserFields": false,
-      "withSuperFollowsUserFields": false
-    }""" % [username]
-    js = await fetchRaw(graphUser ? {"variables": variables}, Api.userScreenName)
+    variables = %*{"screen_name": username}
+    params = {"variables": $variables, "features": gqlFeatures}
+    js = await fetchRaw(graphUser ? params, Api.userScreenName)
   result = parseGraphUser(js)

 proc getGraphUserById*(id: string): Future[User] {.async.} =
   if id.len == 0 or id.any(c => not c.isDigit): return
   let
-    variables = """{"userId": "$1", "withSuperFollowsUserFields": true}""" % [id]
-    js = await fetchRaw(graphUserById ? {"variables": variables}, Api.userRestId)
+    variables = %*{"userId": id}
+    params = {"variables": $variables, "features": gqlFeatures}
+    js = await fetchRaw(graphUserById ? params, Api.userRestId)
   result = parseGraphUser(js)

+proc getGraphUserTweets*(id: string; kind: TimelineKind; after=""): Future[Timeline] {.async.} =
+  if id.len == 0: return
+  let
+    cursor = if after.len > 0: "\"cursor\":\"$1\"," % after else: ""
+    variables = userTweetsVariables % [id, cursor]
+    params = {"variables": variables, "features": gqlFeatures}
+    (url, apiId) = case kind
+                   of TimelineKind.tweets: (graphUserTweets, Api.userTweets)
+                   of TimelineKind.replies: (graphUserTweetsAndReplies, Api.userTweetsAndReplies)
+                   of TimelineKind.media: (graphUserMedia, Api.userMedia)
+    js = await fetch(url ? params, apiId)
+  result = parseGraphTimeline(js, "user", after)
+
+proc getGraphListTweets*(id: string; after=""): Future[Timeline] {.async.} =
+  if id.len == 0: return
+  let
+    cursor = if after.len > 0: "\"cursor\":\"$1\"," % after else: ""
+    variables = listTweetsVariables % [id, cursor]
+    params = {"variables": variables, "features": gqlFeatures}
+    js = await fetch(graphListTweets ? params, Api.listTweets)
+  result = parseGraphTimeline(js, "list", after)

 proc getGraphListBySlug*(name, list: string): Future[List] {.async.} =
   let
-    variables = %*{"screenName": name, "listSlug": list, "withHighlightedLabel": false}
-    url = graphListBySlug ? {"variables": $variables}
-  result = parseGraphList(await fetch(url, Api.listBySlug))
+    variables = %*{"screenName": name, "listSlug": list}
+    params = {"variables": $variables, "features": gqlFeatures}
+  result = parseGraphList(await fetch(graphListBySlug ? params, Api.listBySlug))

 proc getGraphList*(id: string): Future[List] {.async.} =
   let
-    variables = %*{"listId": id, "withHighlightedLabel": false}
-    url = graphList ? {"variables": $variables}
-  result = parseGraphList(await fetch(url, Api.list))
+    variables = %*{"listId": id}
+    params = {"variables": $variables, "features": gqlFeatures}
+  result = parseGraphList(await fetch(graphListById ? params, Api.list))

 proc getGraphListMembers*(list: List; after=""): Future[Result[User]] {.async.} =
   if list.id.len == 0: return
   var
     variables = %*{
       "listId": list.id,
-      "withSuperFollowsUserFields": false,
-      "withBirdwatchPivots": false,
       "withDownvotePerspective": false,
       "withReactionsMetadata": false,
-      "withReactionsPerspective": false,
-      "withSuperFollowsTweetFields": false
+      "withReactionsPerspective": false
     }
   if after.len > 0:
     variables["cursor"] = % after
-  let url = graphListMembers ? {"variables": $variables}
+  let url = graphListMembers ? {"variables": $variables, "features": gqlFeatures}
   result = parseGraphListMembers(await fetchRaw(url, Api.listMembers), after)

-proc getListTimeline*(id: string; after=""): Future[Timeline] {.async.} =
+proc getGraphTweetResult*(id: string): Future[Tweet] {.async.} =
   if id.len == 0: return
   let
-    ps = genParams({"list_id": id, "ranking_mode": "reverse_chronological"}, after)
-    url = listTimeline ? ps
-  result = parseTimeline(await fetch(url, Api.timeline), after)
-
-proc getTimeline*(id: string; after=""; replies=false): Future[Timeline] {.async.} =
-  if id.len == 0: return
-  let
-    ps = genParams({"userId": id, "include_tweet_replies": $replies}, after)
-    url = timeline / (id & ".json") ? ps
-  result = parseTimeline(await fetch(url, Api.timeline), after)
-
-proc getMediaTimeline*(id: string; after=""): Future[Timeline] {.async.} =
-  if id.len == 0: return
-  let url = mediaTimeline / (id & ".json") ? genParams(cursor=after)
-  result = parseTimeline(await fetch(url, Api.timeline), after)
-
-proc getPhotoRail*(name: string): Future[PhotoRail] {.async.} =
-  if name.len == 0: return
-  let
-    ps = genParams({"screen_name": name, "trim_user": "true"},
-                   count="18", ext=false)
-    url = photoRail ? ps
-  result = parsePhotoRail(await fetch(url, Api.timeline))
-
-proc getSearch*[T](query: Query; after=""): Future[Result[T]] {.async.} =
-  when T is User:
-    const
-      searchMode = ("result_filter", "user")
-      parse = parseUsers
-      fetchFunc = fetchRaw
-  else:
-    const
-      searchMode = ("tweet_search_mode", "live")
-      parse = parseTimeline
-      fetchFunc = fetch
-
-  let q = genQueryParam(query)
-  if q.len == 0 or q == emptyQuery:
-    return Result[T](beginning: true, query: query)
-
-  let url = search ? genParams(searchParams & @[("q", q), searchMode], after)
-  try:
-    result = parse(await fetchFunc(url, Api.search), after)
-    result.query = query
-  except InternalError:
-    return Result[T](beginning: true, query: query)
+    variables = tweetResultVariables % id
+    params = {"variables": variables, "features": gqlFeatures}
+    js = await fetch(graphTweetResult ? params, Api.tweetResult)
+  result = parseGraphTweetResult(js)

 proc getGraphTweet(id: string; after=""): Future[Conversation] {.async.} =
   if id.len == 0: return
   let
     cursor = if after.len > 0: "\"cursor\":\"$1\"," % after else: ""
     variables = tweetVariables % [id, cursor]
-    params = {"variables": variables, "features": tweetFeatures}
+    params = {"variables": variables, "features": gqlFeatures}
     js = await fetch(graphTweet ? params, Api.tweetDetail)
   result = parseGraphConversation(js, id)

@@ -119,9 +95,51 @@ proc getTweet*(id: string; after=""): Future[Conversation] {.async.} =
   if after.len > 0:
     result.replies = await getReplies(id, after)

-proc getStatus*(id: string): Future[Tweet] {.async.} =
-  let url = status / (id & ".json") ? genParams()
-  result = parseStatus(await fetch(url, Api.status))
+proc getGraphSearch*(query: Query; after=""): Future[Result[Tweet]] {.async.} =
+  let q = genQueryParam(query)
+  if q.len == 0 or q == emptyQuery:
+    return Result[Tweet](query: query, beginning: true)
+
+  var
+    variables = %*{
+      "rawQuery": q,
+      "count": 20,
+      "product": "Latest",
+      "withDownvotePerspective": false,
+      "withReactionsMetadata": false,
+      "withReactionsPerspective": false
+    }
+  if after.len > 0:
+    variables["cursor"] = % after
+  let url = graphSearchTimeline ? {"variables": $variables, "features": gqlFeatures}
+  result = parseGraphSearch(await fetch(url, Api.search), after)
+  result.query = query
+
+proc getUserSearch*(query: Query; page="1"): Future[Result[User]] {.async.} =
+  if query.text.len == 0:
+    return Result[User](query: query, beginning: true)
+
+  var url = userSearch ? {
+    "q": query.text,
+    "skip_status": "1",
+    "count": "20",
+    "page": page
+  }
+
+  result = parseUsers(await fetchRaw(url, Api.userSearch))
+  result.query = query
+  if page.len == 0:
+    result.bottom = "2"
+  elif page.allCharsInSet(Digits):
+    result.bottom = $(parseInt(page) + 1)
+
+proc getPhotoRail*(name: string): Future[PhotoRail] {.async.} =
+  if name.len == 0: return
+  let
+    ps = genParams({"screen_name": name, "trim_user": "true"},
+                   count="18", ext=false)
+    url = photoRail ? ps
+  result = parsePhotoRail(await fetch(url, Api.timeline))

 proc resolve*(url: string; prefs: Prefs): Future[string] {.async.} =
   let client = newAsyncHttpClient(maxRedirects=0)
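All of the rewritten procs above share one request shape: a `variables` JSON object (built with `%*` or a string template) plus the shared `gqlFeatures` feature-flag blob, both serialized into the query string of a GraphQL endpoint. A self-contained sketch of that pattern follows; the `?` helper, endpoint URL, query id, and feature flag shown here are stand-ins for the real definitions in nitter's consts and utils modules.

```nim
# Sketch of the variables/features pattern used by the new GraphQL procs.
# The endpoint id, feature flag, and `?` helper are stand-ins; nitter's
# real values live in src/consts.nim.
import std/[json, uri]

proc `?`(u: Uri; params: openArray[(string, string)]): Uri =
  # Mimics nitter's query-string helper: attach encoded params to a Uri.
  result = u
  result.query = encodeQuery(params)

let
  graphUser = parseUri("https://api.twitter.com/graphql/QUERY_ID/UserByScreenName")
  gqlFeatures = $(%*{"responsive_web_graphql_timeline_navigation_enabled": true})

proc buildUserUrl(username: string): Uri =
  let
    variables = %*{"screen_name": username}  # same shape as getGraphUser
    params = {"variables": $variables, "features": gqlFeatures}
  graphUser ? params

echo buildUserUrl("jack")
```

Collapsing every endpoint onto one shared `gqlFeatures` blob (instead of per-endpoint flags like the old `tweetFeatures` and the various `withXxx` variables) is what lets the hunks above delete so much one-off request-building code.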
20 changes: 9 additions & 11 deletions src/apiutils.nim
@@ -17,8 +17,8 @@ proc genParams*(pars: openArray[(string, string)] = @[]; cursor="";
     result &= p
   if ext:
     result &= ("ext", "mediaStats")
-    result &= ("include_ext_alt_text", "true")
-    result &= ("include_ext_media_availability", "true")
+    result &= ("include_ext_alt_text", "1")
+    result &= ("include_ext_media_availability", "1")
   if count.len > 0:
     result &= ("count", count)
   if cursor.len > 0:
@@ -44,7 +44,7 @@ proc genHeaders*(token: Token = nil): HttpHeaders =
   })

 template updateToken() =
-  if api != Api.search and resp.headers.hasKey(rlRemaining):
+  if resp.headers.hasKey(rlRemaining):
     let
       remaining = parseInt(resp.headers[rlRemaining])
       reset = parseInt(resp.headers[rlReset])
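With the `api != Api.search` guard removed, updateToken now records rate-limit state for every endpoint. Assuming `rlRemaining` and `rlReset` name Twitter's standard `x-rate-limit-remaining` / `x-rate-limit-reset` headers (the constants are defined outside this diff), the parsing amounts to:

```nim
# Assumption: rlRemaining/rlReset are Twitter's standard rate-limit
# headers; the constants themselves are defined outside this diff.
import std/[strutils, tables, times]

const
  rlRemaining = "x-rate-limit-remaining"
  rlReset = "x-rate-limit-reset"  # unix timestamp of the next window reset

type RateLimit = object
  remaining: int
  reset: Time

proc parseRateLimit(headers: Table[string, string]): RateLimit =
  # updateToken stores this per (token, api) pair so the pool can prefer
  # tokens that still have requests remaining for a given endpoint.
  RateLimit(
    remaining: parseInt(headers[rlRemaining]),
    reset: fromUnix(parseInt(headers[rlReset]).int64))
```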
@@ -67,14 +67,9 @@ template fetchImpl(result, fetchBody) {.dirty.} =

     getContent()

-    # Twitter randomly returns 401 errors with an empty body quite often.
-    # Retrying the request usually works.
-    if resp.status == "401 Unauthorized" and result.len == 0:
-      getContent()
-
-    if resp.status == $Http503:
-      badClient = true
-      raise newException(InternalError, result)
+    if resp.status == $Http503:
+      badClient = true
+      raise newException(BadClientError, "Bad client")

     if result.len > 0:
       if resp.headers.getOrDefault("content-encoding") == "gzip":
@@ -90,6 +85,9 @@ template fetchImpl(result, fetchBody) {.dirty.} =
         raise newException(InternalError, $url)
   except InternalError as e:
     raise e
+  except BadClientError as e:
+    release(token, used=true)
+    raise e
   except Exception as e:
     echo "error: ", e.name, ", msg: ", e.msg, ", token: ", token[], ", url: ", url
     if "length" notin e.msg and "descriptor" notin e.msg:
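The net effect of the last two hunks: a 503 now raises a dedicated `BadClientError` rather than a generic `InternalError`, and the new `except` branch releases the token as used before re-raising, so the pool's accounting stays correct even on failure. A compressed, hypothetical sketch of that flow (nitter's real `Token`, `release`, and fetch plumbing are more involved):

```nim
# Hypothetical stand-ins for nitter's token plumbing; only the error
# flow from the diff above is modeled here.
type
  Token = ref object
    used: bool
  BadClientError = object of CatchableError

var badClient = false

proc release(token: Token; used = false) =
  if token != nil:
    token.used = used

proc fetchOnce(token: Token; status: string): string =
  try:
    if status == "503 Service Unavailable":
      badClient = true  # flag the HTTP client for recycling
      raise newException(BadClientError, "Bad client")
    result = "{}"       # normal response body
  except BadClientError as e:
    release(token, used = true)  # keep the pool's accounting correct
    raise e

# usage: a 503 propagates to the caller only after the token is released
let token = Token()
try:
  discard fetchOnce(token, "503 Service Unavailable")
except BadClientError:
  echo "bad client; token released as used: ", token.used
```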