Merge remote-tracking branch 'upstream/guest_accounts'

PrivacyDev 2023-12-07 11:53:06 -05:00
commit 7d846ed759
39 changed files with 381 additions and 199 deletions

View file

@@ -10,25 +10,34 @@ on:
 jobs:
   test:
-    runs-on: ubuntu-latest
+    runs-on: buildjet-2vcpu-ubuntu-2204
+    strategy:
+      matrix:
+        nim:
+          - "1.6.10"
+          - "1.6.x"
+          - "2.0.x"
+          - "devel"
     steps:
       - uses: actions/checkout@v3
         with:
          fetch-depth: 0
      - name: Cache nimble
        id: cache-nimble
-        uses: actions/cache@v3
+        uses: buildjet/cache@v3
        with:
          path: ~/.nimble
-          key: nimble-${{ hashFiles('*.nimble') }}
-          restore-keys: "nimble-"
+          key: ${{ matrix.nim }}-nimble-${{ hashFiles('*.nimble') }}
+          restore-keys: |
+            ${{ matrix.nim }}-nimble-
      - uses: actions/setup-python@v4
        with:
          python-version: "3.10"
          cache: "pip"
      - uses: jiro4989/setup-nim-action@v1
        with:
-          nim-version: "1.x"
+          nim-version: ${{ matrix.nim }}
+          repo-token: ${{ secrets.GITHUB_TOKEN }}
      - run: nimble build -d:release -Y
      - run: pip install seleniumbase
      - run: seleniumbase install chromedriver
@@ -37,12 +46,11 @@ jobs:
        run: |
          sudo apt install libsass-dev -y
          cp nitter.example.conf nitter.conf
+          sed -i 's/enableDebug = false/enableDebug = true/g' nitter.conf
          nimble md
          nimble scss
+          echo '${{ secrets.GUEST_ACCOUNTS }}' > ./guest_accounts.jsonl
      - name: Run tests
-        env:
-          GUEST_ACCOUNTS: ${{ secrets.GUEST_ACCOUNTS }}
        run: |
-          echo $GUEST_ACCOUNTS > ./guest_accounts.json
          ./nitter &
-          pytest -n4 tests
+          pytest -n8 tests

.gitignore
View file

@@ -10,4 +10,5 @@ nitter
 /public/css/style.css
 /public/md/*.html
 nitter.conf
+guest_accounts.json*
 dump.rdb

View file

@@ -1,7 +1,7 @@
-FROM alpine:3.17 as nim
+FROM alpine:3.18 as nim
 LABEL maintainer="setenforce@protonmail.com"
-RUN apk --no-cache add gcc git libc-dev libsass-dev "nim=1.6.8-r0" nimble pcre
+RUN apk --no-cache add libsass-dev pcre gcc git libc-dev "nim=1.6.14-r0" "nimble=0.13.1-r2"
 WORKDIR /src/nitter
@@ -13,11 +13,13 @@ RUN nimble build -d:danger -d:lto -d:strip \
   && nimble scss \
   && nimble md
-FROM alpine:3.17
+FROM alpine:3.18
 WORKDIR /src/
-RUN apk --no-cache add ca-certificates pcre openssl1.1-compat
+RUN apk --no-cache add pcre ca-certificates openssl1.1-compat
 COPY --from=nim /src/nitter/nitter ./
 COPY --from=nim /src/nitter/nitter.example.conf ./nitter.conf
 COPY --from=nim /src/nitter/public ./public
 EXPOSE 8080
+RUN adduser -h /src/ -D -s /bin/sh nitter
+USER nitter
 CMD ./nitter

View file

@@ -7,12 +7,7 @@
 # disable annoying warnings
 warning("GcUnsafe2", off)
+warning("HoleEnumConv", off)
 hint("XDeclaredButNotUsed", off)
 hint("XCannotRaiseY", off)
 hint("User", off)
-const
-  nimVersion = (major: NimMajor, minor: NimMinor, patch: NimPatch)
-
-when nimVersion >= (1, 6, 0):
-  warning("HoleEnumConv", off)

View file

@@ -23,7 +23,7 @@ redisMaxConnections = 30
 hmacKey = "secretkey" # random key for cryptographic signing of video urls
 base64Media = false # use base64 encoding for proxied media urls
 enableRSS = true # set this to false to disable RSS feeds
-enableDebug = false # enable request logs and debug endpoints (/.tokens)
+enableDebug = false # enable request logs and debug endpoints (/.accounts)
 proxy = "" # http/https url, SOCKS proxies are not supported
 proxyAuth = ""
 tokenCount = 10

View file

@@ -10,11 +10,11 @@ bin = @["nitter"]
 # Dependencies
-requires "nim >= 1.4.8"
+requires "nim >= 1.6.10"
 requires "jester#baca3f"
 requires "karax#5cf360c"
 requires "sass#7dfdd03"
-requires "nimcrypto#4014ef9"
+requires "nimcrypto#a079df9"
 requires "markdown#158efe3"
 requires "packedjson#9e6fbb6"
 requires "supersnappy#6c94198"
@@ -22,7 +22,7 @@ requires "redpool#8b7c1db"
 requires "https://github.com/zedeus/redis#d0a0e6f"
 requires "zippy#ca5989a"
 requires "flatty#e668085"
-requires "jsony#ea811be"
+requires "jsony#1de1f08"
 requires "oauth#b8c163b"
 # Tasks

View file

@@ -155,25 +155,29 @@ proc getGraphTweetSearch*(query: Query; after=""): Future[Timeline] {.async.} =
   if after.len > 0:
     variables["cursor"] = % after
   let url = graphSearchTimeline ? {"variables": $variables, "features": gqlFeatures}
-  result = parseGraphSearch(await fetch(url, Api.search), after)
+  result = parseGraphSearch[Tweets](await fetch(url, Api.search), after)
   result.query = query
-proc getUserSearch*(query: Query; page="1"): Future[Result[User]] {.async.} =
+proc getGraphUserSearch*(query: Query; after=""): Future[Result[User]] {.async.} =
   if query.text.len == 0:
     return Result[User](query: query, beginning: true)
-  let
-    page = if page.len == 0: "1" else: page
-    url = userSearch ? genParams({"q": query.text, "skip_status": "1", "page": page})
-    js = await fetchRaw(url, Api.userSearch)
-  result = parseUsers(js)
+  var
+    variables = %*{
+      "rawQuery": query.text,
+      "count": 20,
+      "product": "People",
+      "withDownvotePerspective": false,
+      "withReactionsMetadata": false,
+      "withReactionsPerspective": false
+    }
+  if after.len > 0:
+    variables["cursor"] = % after
+    result.beginning = false
+  let url = graphSearchTimeline ? {"variables": $variables, "features": gqlFeatures}
+  result = parseGraphSearch[User](await fetch(url, Api.search), after)
   result.query = query
-  if page.len == 0:
-    result.bottom = "2"
-  elif page.allCharsInSet(Digits):
-    result.bottom = $(parseInt(page) + 1)
 proc getPhotoRail*(name: string): Future[PhotoRail] {.async.} =
   if name.len == 0: return

View file

@@ -1,7 +1,7 @@
 # SPDX-License-Identifier: AGPL-3.0-only
 import httpclient, asyncdispatch, options, strutils, uri, times, math, tables
 import jsony, packedjson, zippy, oauth1
-import types, tokens, consts, parserutils, http_pool
+import types, auth, consts, parserutils, http_pool
 import experimental/types/common
 import config
@@ -134,7 +134,7 @@ template fetchImpl(result, additional_headers, fetchBody) {.dirty.} =
     except OSError as e:
       raise e
     except Exception as e:
-      let id = if account.isNil: "null" else: account.id
+      let id = if account.isNil: "null" else: $account.id
       echo "error: ", e.name, ", msg: ", e.msg, ", accountId: ", id, ", url: ", url
       raise rateLimitError()
     finally:

View file

@@ -1,11 +1,27 @@
 #SPDX-License-Identifier: AGPL-3.0-only
-import asyncdispatch, times, json, random, strutils, tables, sets
+import std/[asyncdispatch, times, json, random, sequtils, strutils, tables, packedsets, os]
 import types
+import experimental/parser/guestaccount
 # max requests at a time per account to avoid race conditions
 const
   maxConcurrentReqs = 2
   dayInSeconds = 24 * 60 * 60
+  apiMaxReqs: Table[Api, int] = {
+    Api.search: 50,
+    Api.tweetDetail: 150,
+    Api.photoRail: 180,
+    Api.userTweets: 500,
+    Api.userTweetsAndReplies: 500,
+    Api.userMedia: 500,
+    Api.userRestId: 500,
+    Api.userScreenName: 500,
+    Api.tweetResult: 500,
+    Api.list: 500,
+    Api.listTweets: 500,
+    Api.listMembers: 500,
+    Api.listBySlug: 500
+  }.toTable
 var
   accountPool: seq[GuestAccount]
@@ -14,20 +30,75 @@ var
 template log(str: varargs[string, `$`]) =
   if enableLogging: echo "[accounts] ", str.join("")
-proc getPoolJson*(): JsonNode =
-  var
-    list = newJObject()
-    totalReqs = 0
-    totalPending = 0
-    limited: HashSet[string]
-    reqsPerApi: Table[string, int]
+proc snowflakeToEpoch(flake: int64): int64 =
+  int64(((flake shr 22) + 1288834974657) div 1000)
+
+proc hasExpired(account: GuestAccount): bool =
+  let
+    created = snowflakeToEpoch(account.id)
+    now = epochTime().int64
+    daysOld = int(now - created) div dayInSeconds
+  return daysOld > 30
+
+proc getAccountPoolHealth*(): JsonNode =
   let now = epochTime().int
-  for account in accountPool:
-    totalPending.inc(account.pending)
-    var includeAccount = false
+  var
+    totalReqs = 0
+    limited: PackedSet[int64]
+    reqsPerApi: Table[string, int]
+    oldest = now.int64
+    newest = 0'i64
+    average = 0'i64
+
+  for account in accountPool:
+    let created = snowflakeToEpoch(account.id)
+    if created > newest:
+      newest = created
+    if created < oldest:
+      oldest = created
+    average += created
+
+    for api in account.apis.keys:
+      let
+        apiStatus = account.apis[api]
+        reqs = apiMaxReqs[api] - apiStatus.remaining
+
+      if apiStatus.limited:
+        limited.incl account.id
+
+      # no requests made with this account and endpoint since the limit reset
+      if apiStatus.reset < now:
+        continue
+
+      reqsPerApi.mgetOrPut($api, 0).inc reqs
+      totalReqs.inc reqs
+
+  if accountPool.len > 0:
+    average = average div accountPool.len
+  else:
+    oldest = 0
+    average = 0
+
+  return %*{
+    "accounts": %*{
+      "total": accountPool.len,
+      "limited": limited.card,
+      "oldest": $fromUnix(oldest),
+      "newest": $fromUnix(newest),
+      "average": $fromUnix(average)
+    },
+    "requests": %*{
+      "total": totalReqs,
+      "apis": reqsPerApi
+    }
+  }
+
+proc getAccountPoolDebug*(): JsonNode =
+  let now = epochTime().int
+  var list = newJObject()
+
+  for account in accountPool:
     let accountJson = %*{
       "apis": newJObject(),
       "pending": account.pending,
@@ -46,38 +117,11 @@ proc getPoolJson*(): JsonNode =
       if apiStatus.limited:
         obj["limited"] = %true
-        limited.incl account.id
       accountJson{"apis", $api} = obj
-      includeAccount = true
-      let
-        maxReqs =
-          case api
-          of Api.search: 50
-          of Api.tweetDetail: 150
-          of Api.photoRail: 180
-          of Api.userTweets, Api.userTweetsAndReplies, Api.userMedia,
-             Api.userRestId, Api.userScreenName,
-             Api.tweetResult,
-             Api.list, Api.listTweets, Api.listMembers, Api.listBySlug, Api.favorites, Api.retweeters, Api.favoriters, Api.following, Api.followers: 500
-          of Api.userSearch: 900
-        reqs = maxReqs - apiStatus.remaining
-      reqsPerApi[$api] = reqsPerApi.getOrDefault($api, 0) + reqs
-      totalReqs.inc(reqs)
-    if includeAccount:
-      list[account.id] = accountJson
-  return %*{
-    "amount": accountPool.len,
-    "limited": limited.card,
-    "requests": totalReqs,
-    "pending": totalPending,
-    "apis": reqsPerApi,
-    "accounts": list
-  }
+    list[$account.id] = accountJson
+  return %list
 proc rateLimitError*(): ref RateLimitError =
   newException(RateLimitError, "rate limited")
@@ -141,12 +185,25 @@ proc setRateLimit*(account: GuestAccount; api: Api; remaining, reset: int) =
   account.apis[api] = RateLimit(remaining: remaining, reset: reset)
-proc initAccountPool*(cfg: Config; accounts: JsonNode) =
+proc initAccountPool*(cfg: Config; path: string) =
   enableLogging = cfg.enableDebug
-  for account in accounts:
-    accountPool.add GuestAccount(
-      id: account{"user", "id_str"}.getStr,
-      oauthToken: account{"oauth_token"}.getStr,
-      oauthSecret: account{"oauth_token_secret"}.getStr,
-    )
+  let jsonlPath = if path.endsWith(".json"): (path & 'l') else: path
+
+  if fileExists(jsonlPath):
+    log "Parsing JSONL guest accounts file: ", jsonlPath
+    for line in jsonlPath.lines:
+      accountPool.add parseGuestAccount(line)
+  elif fileExists(path):
+    log "Parsing JSON guest accounts file: ", path
+    accountPool = parseGuestAccounts(path)
+  else:
+    echo "[accounts] ERROR: ", path, " not found. This file is required to authenticate API requests."
+    quit 1
+
+  let accountsPrePurge = accountPool.len
+  accountPool.keepItIf(not it.hasExpired)
+
+  log "Successfully added ", accountPool.len, " valid accounts."
+  if accountsPrePurge > accountPool.len:
+    log "Purged ", accountsPrePurge - accountPool.len, " expired accounts."

View file

@@ -9,7 +9,6 @@ const
   activate* = $(api / "1.1/guest/activate.json")
   photoRail* = api / "1.1/statuses/media_timeline.json"
-  userSearch* = api / "1.1/users/search.json"
   timelineApi = api / "2/timeline"
   favorites* = timelineApi / "favorites"
@@ -37,12 +36,10 @@ const
     "include_cards": "1",
     "include_entities": "1",
     "include_profile_interstitial_type": "0",
-    "include_quote_count": "1",
-    "include_reply_count": "1",
-    "include_user_entities": "1",
-    "include_ext_reply_count": "1",
-    "include_ext_is_blue_verified": "1",
-    #"include_ext_verified_type": "1",
+    "include_quote_count": "0",
+    "include_reply_count": "0",
+    "include_user_entities": "0",
+    "include_ext_reply_count": "0",
     "include_ext_media_color": "0",
     "cards_platform": "Web-13",
     "tweet_mode": "extended",

View file

@@ -1,7 +1,7 @@
 import options
 import jsony
 import user, ../types/[graphuser, graphlistmembers]
-from ../../types import User, Result, Query, QueryKind
+from ../../types import User, VerifiedType, Result, Query, QueryKind
 proc parseGraphUser*(json: string): User =
   if json.len == 0 or json[0] != '{':
@@ -12,9 +12,10 @@ proc parseGraphUser*(json: string): User =
   if raw.data.userResult.result.unavailableReason.get("") == "Suspended":
     return User(suspended: true)
-  result = toUser raw.data.userResult.result.legacy
+  result = raw.data.userResult.result.legacy
   result.id = raw.data.userResult.result.restId
-  result.verified = result.verified or raw.data.userResult.result.isBlueVerified
+  if result.verifiedType == VerifiedType.none and raw.data.userResult.result.isBlueVerified:
+    result.verifiedType = blue
 proc parseGraphListMembers*(json, cursor: string): Result[User] =
   result = Result[User](
@@ -30,7 +31,7 @@ proc parseGraphListMembers*(json, cursor: string): Result[User] =
     of TimelineTimelineItem:
       let userResult = entry.content.itemContent.userResults.result
       if userResult.restId.len > 0:
-        result.content.add toUser userResult.legacy
+        result.content.add userResult.legacy
     of TimelineTimelineCursor:
       if entry.content.cursorType == "Bottom":
         result.bottom = entry.content.value

View file

@@ -0,0 +1,21 @@
+import std/strutils
+import jsony
+import ../types/guestaccount
+from ../../types import GuestAccount
+
+proc toGuestAccount(account: RawAccount): GuestAccount =
+  let id = account.oauthToken[0 ..< account.oauthToken.find('-')]
+  result = GuestAccount(
+    id: parseBiggestInt(id),
+    oauthToken: account.oauthToken,
+    oauthSecret: account.oauthTokenSecret
+  )
+
+proc parseGuestAccount*(raw: string): GuestAccount =
+  let rawAccount = raw.fromJson(RawAccount)
+  result = rawAccount.toGuestAccount
+
+proc parseGuestAccounts*(path: string): seq[GuestAccount] =
+  let rawAccounts = readFile(path).fromJson(seq[RawAccount])
+  for account in rawAccounts:
+    result.add account.toGuestAccount
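A minimal sketch of how this loader is fed (an illustration, not part of the commit): each line of guest_accounts.jsonl is one JSON object, and the numeric account id is the prefix of oauth_token before the dash, which toGuestAccount splits off. The sketch assumes jsony maps the snake_case keys onto RawAccount's camelCase fields, as nitter's other experimental parsers rely on for Twitter's JSON.

import experimental/parser/guestaccount   # path relative to src/, as imported by auth.nim

let line = """{"oauth_token": "1234567890123456789-AbCdEfGh", "oauth_token_secret": "s3cr3t"}"""
let account = parseGuestAccount(line)     # made-up token, for illustration only
echo account.id                           # => 1234567890123456789, the numeric token prefix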

View file

@@ -1,6 +1,6 @@
 import std/[options, tables, strutils, strformat, sugar]
 import jsony
-import ../types/unifiedcard
+import user, ../types/unifiedcard
 from ../../types import Card, CardKind, Video
 from ../../utils import twimg, https
@@ -27,6 +27,14 @@ proc parseMediaDetails(data: ComponentData; card: UnifiedCard; result: var Card)
   result.text = data.topicDetail.title
   result.dest = "Topic"
+proc parseJobDetails(data: ComponentData; card: UnifiedCard; result: var Card) =
+  data.destination.parseDestination(card, result)
+  result.kind = CardKind.jobDetails
+  result.title = data.title
+  result.text = data.shortDescriptionText
+  result.dest = &"@{data.profileUser.username} · {data.location}"
+
 proc parseAppDetails(data: ComponentData; card: UnifiedCard; result: var Card) =
   let app = card.appStoreData[data.appId][0]
@@ -84,6 +92,8 @@ proc parseUnifiedCard*(json: string): Card =
       component.parseMedia(card, result)
     of buttonGroup:
       discard
+    of ComponentType.jobDetails:
+      component.data.parseJobDetails(card, result)
     of ComponentType.hidden:
       result.kind = CardKind.hidden
     of ComponentType.unknown:

View file

@@ -56,7 +56,7 @@ proc toUser*(raw: RawUser): User =
     tweets: raw.statusesCount,
     likes: raw.favouritesCount,
     media: raw.mediaCount,
-    verified: raw.verified or raw.extIsBlueVerified,
+    verifiedType: raw.verifiedType,
     protected: raw.protected,
     joinDate: parseTwitterDate(raw.createdAt),
     banner: getBanner(raw),
@@ -68,6 +68,11 @@ proc toUser*(raw: RawUser): User =
   result.expandUserEntities(raw)
+proc parseHook*(s: string; i: var int; v: var User) =
+  var u: RawUser
+  parseHook(s, i, u)
+  v = toUser u
+
 proc parseUser*(json: string; username=""): User =
   handleErrors:
     case error.code
@@ -75,7 +80,7 @@ proc parseUser*(json: string; username=""): User =
       of userNotFound: return
       else: echo "[error - parseUser]: ", error
-  result = toUser json.fromJson(RawUser)
+  result = json.fromJson(User)
 proc parseUsers*(json: string; after=""): Result[User] =
   result = Result[User](beginning: after.len == 0)

View file

@@ -1,5 +1,5 @@
 import options
-import user
+from ../../types import User
 type
   GraphUser* = object
@@ -9,7 +9,7 @@ type
     result*: UserResult
   UserResult = object
-    legacy*: RawUser
+    legacy*: User
     restId*: string
     isBlueVerified*: bool
     unavailableReason*: Option[string]

View file

@@ -0,0 +1,4 @@
+type
+  RawAccount* = object
+    oauthToken*: string
+    oauthTokenSecret*: string

View file

@@ -1,5 +1,5 @@
 import std/tables
-import user
+from ../../types import User
 type
   Search* = object
@@ -7,7 +7,7 @@ type
     timeline*: Timeline
   GlobalObjects = object
-    users*: Table[string, RawUser]
+    users*: Table[string, User]
   Timeline = object
     instructions*: seq[Instructions]

View file

@@ -1,7 +1,10 @@
-import options, tables
-from ../../types import VideoType, VideoVariant
+import std/[options, tables, times]
+import jsony
+from ../../types import VideoType, VideoVariant, User
 type
+  Text* = distinct string
+
   UnifiedCard* = object
     componentObjects*: Table[string, Component]
     destinationObjects*: Table[string, Destination]
@@ -13,6 +16,7 @@ type
     media
     swipeableMedia
     buttonGroup
+    jobDetails
     appStoreDetails
     twitterListDetails
     communityDetails
@@ -29,12 +33,15 @@ type
     appId*: string
     mediaId*: string
     destination*: string
+    location*: string
     title*: Text
     subtitle*: Text
     name*: Text
     memberCount*: int
     mediaList*: seq[MediaItem]
     topicDetail*: tuple[title: Text]
+    profileUser*: User
+    shortDescriptionText*: string
   MediaItem* = object
     id*: string
@@ -69,12 +76,9 @@ type
     title*: Text
     category*: Text
-  Text = object
-    content: string
   TypeField = Component | Destination | MediaEntity | AppStoreData
-converter fromText*(text: Text): string = text.content
+converter fromText*(text: Text): string = string(text)
 proc renameHook*(v: var TypeField; fieldName: var string) =
   if fieldName == "type":
@@ -86,6 +90,7 @@ proc enumHook*(s: string; v: var ComponentType) =
     of "media": media
     of "swipeable_media": swipeableMedia
     of "button_group": buttonGroup
+    of "job_details": jobDetails
     of "app_store_details": appStoreDetails
     of "twitter_list_details": twitterListDetails
     of "community_details": communityDetails
@@ -106,3 +111,18 @@ proc enumHook*(s: string; v: var MediaType) =
     of "photo": photo
     of "model3d": model3d
     else: echo "ERROR: Unknown enum value (MediaType): ", s; photo
+
+proc parseHook*(s: string; i: var int; v: var DateTime) =
+  var str: string
+  parseHook(s, i, str)
+  v = parse(str, "yyyy-MM-dd hh:mm:ss")
+
+proc parseHook*(s: string; i: var int; v: var Text) =
+  if s[i] == '"':
+    var str: string
+    parseHook(s, i, str)
+    v = Text(str)
+  else:
+    var t: tuple[content: string]
+    parseHook(s, i, t)
+    v = Text(t.content)
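Because Text is now a distinct string rather than an object, the hook above keeps both JSON shapes decodable. A small sketch (an assumption for illustration, run with this module's hooks in scope; not part of the commit):

import jsony

doAssert "\"Engineer\"".fromJson(Text).string == "Engineer"                  # bare string form
doAssert """{"content": "Engineer"}""".fromJson(Text).string == "Engineer"   # legacy object form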

View file

@@ -1,5 +1,6 @@
 import options
 import common
+from ../../types import VerifiedType
 type
   RawUser* = object
@@ -15,8 +16,7 @@ type
     favouritesCount*: int
     statusesCount*: int
     mediaCount*: int
-    verified*: bool
-    extIsBlueVerified*: bool
+    verifiedType*: VerifiedType
     protected*: bool
     profileLinkColor*: string
     profileBannerUrl*: string

View file

@@ -4,11 +4,10 @@ import config
 from net import Port
 from htmlgen import a
 from os import getEnv
-from json import parseJson
 import jester
-import types, config, prefs, formatters, redis_cache, http_pool, tokens
+import types, config, prefs, formatters, redis_cache, http_pool, auth
 import views/[general, about]
 import routes/[
   preferences, timeline, status, media, search, rss, list, debug,
@@ -19,9 +18,8 @@ const issuesUrl = "https://github.com/zedeus/nitter/issues"
 let
   accountsPath = getEnv("NITTER_ACCOUNTS_FILE", "./guest_accounts.json")
-  accounts = parseJson(readFile(accountsPath))
-initAccountPool(cfg, parseJson(readFile(accountsPath)))
+initAccountPool(cfg, accountsPath)
 if not cfg.enableDebug:
   # Silence Jester's query warning

View file

@@ -22,7 +22,7 @@ proc parseUser(js: JsonNode; id=""): User =
     tweets: js{"statuses_count"}.getInt,
     likes: js{"favourites_count"}.getInt,
     media: js{"media_count"}.getInt,
-    verified: js{"verified"}.getBool or js{"ext_is_blue_verified"}.getBool,
+    verifiedType: parseEnum[VerifiedType](js{"verified_type"}.getStr("None")),
     protected: js{"protected"}.getBool,
     joinDate: js{"created_at"}.getTime
   )
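The verified_type handling above leans on std/strutils parseEnum matching the string values declared for VerifiedType in the types module, with getStr("None") covering users that lack the field. A quick sketch (not part of the commit):

import std/strutils
import types   # nitter's src/types.nim, where VerifiedType is declared

doAssert parseEnum[VerifiedType]("Blue") == blue
doAssert parseEnum[VerifiedType]("Business") == business
doAssert parseEnum[VerifiedType]("None") == VerifiedType.none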
@@ -35,8 +35,8 @@ proc parseGraphUser(js: JsonNode): User =
     user = ? js{"user_results", "result"}
   result = parseUser(user{"legacy"})
-  if "is_blue_verified" in user:
-    result.verified = user{"is_blue_verified"}.getBool()
+  if result.verifiedType == VerifiedType.none and user{"is_blue_verified"}.getBool(false):
+    result.verifiedType = blue
 proc parseGraphList*(js: JsonNode): List =
   if js.isNull: return
@@ -220,8 +220,6 @@ proc parseTweet(js: JsonNode; jsCard: JsonNode = newJNull()): Tweet =
     )
   )
-  result.expandTweetEntities(js)
   # fix for pinned threads
   if result.hasThread and result.threadId == 0:
     result.threadId = js{"self_thread", "id_str"}.getId
@@ -258,6 +256,8 @@ proc parseTweet(js: JsonNode; jsCard: JsonNode = newJNull()): Tweet =
     else:
       result.card = some parseCard(jsCard, js{"entities", "urls"})
+  result.expandTweetEntities(js)
+
   with jsMedia, js{"extended_entities", "media"}:
     for m in jsMedia:
       case m{"type"}.getStr
@@ -442,6 +442,8 @@ proc parseGraphTweet(js: JsonNode; isLegacy=false): Tweet =
     return Tweet(text: "You're unable to view this Tweet because it's only available to the Subscribers of the account owner.")
   of "TweetWithVisibilityResults":
     return parseGraphTweet(js{"tweet"}, isLegacy)
+  else:
+    discard
   if not js.hasKey("legacy"):
     return Tweet()
@@ -592,8 +594,8 @@ proc parseGraphRetweetersTimeline*(js: JsonNode; root: string; after=""): UsersT
 proc parseGraphFollowTimeline*(js: JsonNode; root: string; after=""): UsersTimeline =
   return parseGraphUsersTimeline(js{"data", "user", "result", "timeline", "timeline"}, after)
-proc parseGraphSearch*(js: JsonNode; after=""): Timeline =
-  result = Timeline(beginning: after.len == 0)
+proc parseGraphSearch*[T: User | Tweets](js: JsonNode; after=""): Result[T] =
+  result = Result[T](beginning: after.len == 0)
   let instructions = js{"data", "search_by_raw_query", "search_timeline", "timeline", "instructions"}
   if instructions.len == 0:
@@ -604,13 +606,19 @@ proc parseGraphSearch*(js: JsonNode; after=""): Timeline =
     if typ == "TimelineAddEntries":
       for e in instruction{"entries"}:
         let entryId = e{"entryId"}.getStr
-        if entryId.startsWith("tweet"):
-          with tweetRes, e{"content", "itemContent", "tweet_results", "result"}:
-            let tweet = parseGraphTweet(tweetRes, true)
-            if not tweet.available:
-              tweet.id = parseBiggestInt(entryId.getId())
-            result.content.add tweet
-        elif entryId.startsWith("cursor-bottom"):
+        when T is Tweets:
+          if entryId.startsWith("tweet"):
+            with tweetRes, e{"content", "itemContent", "tweet_results", "result"}:
+              let tweet = parseGraphTweet(tweetRes)
+              if not tweet.available:
+                tweet.id = parseBiggestInt(entryId.getId())
+              result.content.add tweet
+        elif T is User:
+          if entryId.startsWith("user"):
+            with userRes, e{"content", "itemContent"}:
+              result.content.add parseGraphUser(userRes)
+        if entryId.startsWith("cursor-bottom"):
           result.bottom = e{"content", "value"}.getStr
     elif typ == "TimelineReplaceEntry":
       if instruction{"entry_id_to_replace"}.getStr.startsWith("cursor-bottom"):

View file

@@ -1,9 +1,17 @@
 # SPDX-License-Identifier: AGPL-3.0-only
-import std/[strutils, times, macros, htmlgen, options, algorithm, re]
+import std/[times, macros, htmlgen, options, algorithm, re]
+import std/strutils except escape
 import std/unicode except strip
+from xmltree import escape
 import packedjson
 import types, utils, formatters
+
+const
+  unicodeOpen = "\uFFFA"
+  unicodeClose = "\uFFFB"
+  xmlOpen = escape("<")
+  xmlClose = escape(">")
 let
   unRegex = re"(^|[^A-z0-9-_./?])@([A-z0-9_]{1,15})"
   unReplace = "$1<a href=\"/$2\">@$2</a>"
@@ -238,7 +246,7 @@ proc expandUserEntities*(user: var User; js: JsonNode) =
     .replacef(htRegex, htReplace)
 proc expandTextEntities(tweet: Tweet; entities: JsonNode; text: string; textSlice: Slice[int];
-                        replyTo=""; hasQuote=false) =
+                        replyTo=""; hasRedundantLink=false) =
   let hasCard = tweet.card.isSome
   var replacements = newSeq[ReplaceSlice]()
@@ -249,7 +257,7 @@ proc expandTextEntities(tweet: Tweet; entities: JsonNode; text: string; textSlic
     if urlStr.len == 0 or urlStr notin text:
       continue
-    replacements.extractUrls(u, textSlice.b, hideTwitter = hasQuote)
+    replacements.extractUrls(u, textSlice.b, hideTwitter = hasRedundantLink)
     if hasCard and u{"url"}.getStr == get(tweet.card).url:
       get(tweet.card).url = u{"expanded_url"}.getStr
@@ -289,9 +297,10 @@ proc expandTextEntities(tweet: Tweet; entities: JsonNode; text: string; textSlic
 proc expandTweetEntities*(tweet: Tweet; js: JsonNode) =
   let
     entities = ? js{"entities"}
-    hasQuote = js{"is_quote_status"}.getBool
     textRange = js{"display_text_range"}
    textSlice = textRange{0}.getInt .. textRange{1}.getInt
+    hasQuote = js{"is_quote_status"}.getBool
+    hasJobCard = tweet.card.isSome and get(tweet.card).kind == jobDetails
   var replyTo = ""
   if tweet.replyId != 0:
@@ -299,12 +308,14 @@ proc expandTweetEntities*(tweet: Tweet; js: JsonNode) =
       replyTo = reply.getStr
       tweet.reply.add replyTo
-  tweet.expandTextEntities(entities, tweet.text, textSlice, replyTo, hasQuote)
+  tweet.expandTextEntities(entities, tweet.text, textSlice, replyTo, hasQuote or hasJobCard)
 proc expandNoteTweetEntities*(tweet: Tweet; js: JsonNode) =
   let
     entities = ? js{"entity_set"}
-    text = js{"text"}.getStr
+    text = js{"text"}.getStr.multiReplace(("<", unicodeOpen), (">", unicodeClose))
     textSlice = 0..text.runeLen
   tweet.expandTextEntities(entities, text, textSlice)
+
+  tweet.text = tweet.text.multiReplace((unicodeOpen, xmlOpen), (unicodeClose, xmlClose))
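What the placeholder constants above achieve: literal < and > in a note tweet's text are parked on the noncharacter code points U+FFFA/U+FFFB while entities are expanded, then swapped for &lt;/&gt; so they survive as plain text in the rendered HTML. A sketch of the round-trip (not part of the commit):

import std/strutils except escape
from xmltree import escape

let raw = "the first 18 HTML tags: <title>, <a>, <p>"
let parked = raw.multiReplace(("<", "\uFFFA"), (">", "\uFFFB"))
let rendered = parked.multiReplace(("\uFFFA", escape("<")), ("\uFFFB", escape(">")))
doAssert rendered == "the first 18 HTML tags: &lt;title&gt;, &lt;a&gt;, &lt;p&gt;"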

View file

@@ -67,7 +67,7 @@ proc genQueryParam*(query: Query): string =
       param &= "OR "
   if query.fromUser.len > 0 and query.kind in {posts, media}:
-    param &= "filter:self_threads OR-filter:replies "
+    param &= "filter:self_threads OR -filter:replies "
   if "nativeretweets" notin query.excludes:
     param &= "include:nativeretweets "

View file

@@ -52,6 +52,7 @@ proc initRedisPool*(cfg: Config) {.async.} =
       await migrate("profileDates", "p:*")
       await migrate("profileStats", "p:*")
      await migrate("userType", "p:*")
+      await migrate("verifiedType", "p:*")
     pool.withAcquire(r):
       # optimize memory usage for user ID buckets
@@ -85,7 +86,7 @@ proc cache*(data: List) {.async.} =
   await setEx(data.listKey, listCacheTime, compress(toFlatty(data)))
 proc cache*(data: PhotoRail; name: string) {.async.} =
-  await setEx("pr:" & toLower(name), baseCacheTime, compress(toFlatty(data)))
+  await setEx("pr:" & toLower(name), baseCacheTime * 2, compress(toFlatty(data)))
 proc cache*(data: User) {.async.} =
   if data.username.len == 0: return

View file

@@ -1,10 +1,13 @@
 # SPDX-License-Identifier: AGPL-3.0-only
 import jester
 import router_utils
-import ".."/[tokens, types]
+import ".."/[auth, types]
 proc createDebugRouter*(cfg: Config) =
   router debug:
-    get "/.tokens":
+    get "/.health":
+      respJson getAccountPoolHealth()
+
+    get "/.accounts":
       cond cfg.enableDebug
-      respJson getPoolJson()
+      respJson getAccountPoolDebug()

View file

@@ -29,7 +29,7 @@ proc createSearchRouter*(cfg: Config) =
         redirect("/" & q)
       var users: Result[User]
       try:
-        users = await getUserSearch(query, getCursor())
+        users = await getGraphUserSearch(query, getCursor())
       except InternalError:
         users = Result[User](beginning: true, query: query)
       resp renderMain(renderUserSearch(users, prefs), request, cfg, prefs, title)

View file

@@ -28,6 +28,8 @@ $more_replies_dots: #AD433B;
 $error_red: #420A05;
 $verified_blue: #1DA1F2;
+$verified_business: #FAC82B;
+$verified_government: #C1B6A4;
 $icon_text: $fg_color;
 $tab: $fg_color;

View file

@@ -39,6 +39,8 @@ body {
   --error_red: #{$error_red};
   --verified_blue: #{$verified_blue};
+  --verified_business: #{$verified_business};
+  --verified_government: #{$verified_government};
   --icon_text: #{$icon_text};
   --tab: #{$fg_color};
@@ -141,17 +143,30 @@ ul {
 .verified-icon {
   color: var(--icon_text);
-  background-color: var(--verified_blue);
   border-radius: 50%;
   flex-shrink: 0;
   margin: 2px 0 3px 3px;
-  padding-top: 2px;
-  height: 12px;
+  padding-top: 3px;
+  height: 11px;
   width: 14px;
   font-size: 8px;
   display: inline-block;
   text-align: center;
   vertical-align: middle;
+
+  &.blue {
+    background-color: var(--verified_blue);
+  }
+
+  &.business {
+    color: var(--bg_panel);
+    background-color: var(--verified_business);
+  }
+
+  &.government {
+    color: var(--bg_panel);
+    background-color: var(--verified_government);
+  }
 }
 @media(max-width: 600px) {

View file

@@ -70,8 +70,9 @@ nav {
 .lp {
   height: 14px;
-  margin-top: 2px;
-  display: block;
+  display: inline-block;
+  position: relative;
+  top: 2px;
   fill: var(--fg_nav);
   &:hover {

View file

@@ -115,7 +115,7 @@
 }
 .profile-card-tabs-name {
-  @include breakable;
+  flex-shrink: 100;
 }
 .profile-card-avatar {

View file

@@ -14,6 +14,8 @@
   button {
     margin: 0 2px 0 0;
     height: 23px;
+    display: flex;
+    align-items: center;
   }
   .pref-input {

View file

@@ -10,16 +10,13 @@ type
   BadClientError* = object of CatchableError
   TimelineKind* {.pure.} = enum
-    tweets
-    replies
-    media
+    tweets, replies, media
   Api* {.pure.} = enum
     tweetDetail
     tweetResult
     photoRail
     search
-    userSearch
     list
     listBySlug
     listMembers
@@ -42,7 +39,7 @@ type
     limitedAt*: int
   GuestAccount* = ref object
-    id*: string
+    id*: int64
     oauthToken*: string
     oauthSecret*: string
     pending*: int
@@ -69,6 +66,12 @@ type
     tweetUnavailable = 421
     tweetCensored = 422
+  VerifiedType* = enum
+    none = "None"
+    blue = "Blue"
+    business = "Business"
+    government = "Government"
+
   User* = object
     id*: string
     username*: string
@@ -84,7 +87,7 @@ type
     tweets*: int
     likes*: int
     media*: int
-    verified*: bool
+    verifiedType*: VerifiedType
     protected*: bool
     suspended*: bool
     joinDate*: DateTime
@@ -168,6 +171,7 @@ type
     imageDirectMessage = "image_direct_message"
     audiospace = "audiospace"
     newsletterPublication = "newsletter_publication"
+    jobDetails = "job_details"
     hidden
     unknown

View file

@@ -16,7 +16,8 @@ const
     "twimg.com",
     "abs.twimg.com",
     "pbs.twimg.com",
-    "video.twimg.com"
+    "video.twimg.com",
+    "x.com"
   ]
 proc setHmacKey*(key: string) =
@@ -57,4 +58,4 @@ proc isTwitterUrl*(uri: Uri): bool =
   uri.hostname in twitterDomains
 proc isTwitterUrl*(url: string): bool =
-  parseUri(url).hostname in twitterDomains
+  isTwitterUrl(parseUri(url))

View file

@@ -52,7 +52,7 @@ proc renderHead*(prefs: Prefs; cfg: Config; req: Request; titleText=""; desc="";
   let opensearchUrl = getUrlPrefix(cfg) & "/opensearch"
   buildHtml(head):
-    link(rel="stylesheet", type="text/css", href="/css/style.css?v=18")
+    link(rel="stylesheet", type="text/css", href="/css/style.css?v=19")
     link(rel="stylesheet", type="text/css", href="/css/fontello.css?v=2")
     if theme.len > 0:

View file

@@ -23,6 +23,13 @@ proc icon*(icon: string; text=""; title=""; class=""; href=""): VNode =
     if text.len > 0:
       text " " & text
+template verifiedIcon*(user: User): untyped {.dirty.} =
+  if user.verifiedType != VerifiedType.none:
+    let lower = ($user.verifiedType).toLowerAscii()
+    icon "ok", class=(&"verified-icon {lower}"), title=(&"Verified {lower} account")
+  else:
+    text ""
+
 proc linkUser*(user: User, class=""): VNode =
   let
     isName = "username" notin class
@@ -32,11 +39,11 @@ proc linkUser*(user: User, class=""): VNode =
   buildHtml(a(href=href, class=class, title=nameText)):
     text nameText
-    if isName and user.verified:
-      icon "ok", class="verified-icon", title="Verified account"
-    if isName and user.protected:
+    if isName:
+      verifiedIcon(user)
+    if user.protected:
       text " "
       icon "lock", title="Protected account"
 proc linkText*(text: string; class=""): VNode =
   let url = if "http" notin text: https & text else: text

View file

@@ -205,8 +205,7 @@ proc renderAttribution(user: User; prefs: Prefs): VNode =
   buildHtml(a(class="attribution", href=("/" & user.username))):
     renderMiniAvatar(user, prefs)
     strong: text user.fullname
-    if user.verified:
-      icon "ok", class="verified-icon", title="Verified account"
+    verifiedIcon(user)
 proc renderMediaTags(tags: seq[User]): VNode =
   buildHtml(tdiv(class="media-tag-block")):

View file

@@ -21,9 +21,9 @@ card = [
 no_thumb = [
     ['FluentAI/status/1116417904831029248',
-     'Amazons Alexa isnt just AI — thousands of humans are listening',
-     'One of the only ways to improve Alexa is to have human beings check it for errors',
-     'theverge.com'],
+     'LinkedIn',
+     'This link will take you to a page thats not on LinkedIn',
+     'lnkd.in'],
     ['Thom_Wolf/status/1122466524860702729',
      'facebookresearch/fairseq',

View file

@@ -9,7 +9,7 @@ text = [
 What are we doing wrong? reuters.com/article/us-norwa"""],
     ['nim_lang/status/1491461266849808397#m',
-     'Nim language', '@nim_lang',
+     'Nim', '@nim_lang',
      """What's better than Nim 1.6.0?
 Nim 1.6.2 :)

View file

@@ -1,4 +1,4 @@
-from base import BaseTestCase, Tweet, get_timeline_tweet
+from base import BaseTestCase, Tweet, Conversation, get_timeline_tweet
 from parameterized import parameterized
 # image = tweet + 'div.attachments.media-body > div > div > a > div > img'
@@ -35,7 +35,16 @@ multiline = [
     CALM
     AND
     CLICHÉ
-    ON"""]
+    ON"""],
+    [1718660434457239868, 'WebDesignMuseum',
+     """
+Happy 32nd Birthday HTML tags!
+
+On October 29, 1991, the internet pioneer, Tim Berners-Lee, published a document entitled HTML Tags.
+
+The document contained a description of the first 18 HTML tags: <title>, <nextid>, <a>, <isindex>, <plaintext>, <listing>, <p>, <h1><h6>, <address>, <hp1>, <hp2>, <dl>, <dt>, <dd>, <ul>, <li>,<menu> and <dir>. The design of the first version of HTML language was influenced by the SGML universal markup language.
+
+#WebDesignHistory"""]
 ]
 link = [
@@ -74,22 +83,18 @@ retweet = [
     [3, 'mobile_test_8', 'mobile test 8', 'jack', '@jack', 'twttr']
 ]
-# reply = [
-#     ['mobile_test/with_replies', 15]
-# ]
 class TweetTest(BaseTestCase):
-    # @parameterized.expand(timeline)
-    # def test_timeline(self, index, fullname, username, date, tid, text):
-    #     self.open_nitter(username)
-    #     tweet = get_timeline_tweet(index)
-    #     self.assert_exact_text(fullname, tweet.fullname)
-    #     self.assert_exact_text('@' + username, tweet.username)
-    #     self.assert_exact_text(date, tweet.date)
-    #     self.assert_text(text, tweet.text)
-    #     permalink = self.find_element(tweet.date + ' a')
-    #     self.assertIn(tid, permalink.get_attribute('href'))
+    @parameterized.expand(timeline)
+    def test_timeline(self, index, fullname, username, date, tid, text):
+        self.open_nitter(username)
+        tweet = get_timeline_tweet(index)
+        self.assert_exact_text(fullname, tweet.fullname)
+        self.assert_exact_text('@' + username, tweet.username)
+        self.assert_exact_text(date, tweet.date)
+        self.assert_text(text, tweet.text)
+        permalink = self.find_element(tweet.date + ' a')
+        self.assertIn(tid, permalink.get_attribute('href'))
     @parameterized.expand(status)
     def test_status(self, tid, fullname, username, date, text):
@@ -103,18 +108,18 @@ class TweetTest(BaseTestCase):
     @parameterized.expand(multiline)
     def test_multiline_formatting(self, tid, username, text):
         self.open_nitter(f'{username}/status/{tid}')
-        self.assert_text(text.strip('\n'), '.main-tweet')
+        self.assert_text(text.strip('\n'), Conversation.main)
     @parameterized.expand(emoji)
     def test_emoji(self, tweet, text):
         self.open_nitter(tweet)
-        self.assert_text(text, '.main-tweet')
+        self.assert_text(text, Conversation.main)
     @parameterized.expand(link)
     def test_link(self, tweet, links):
         self.open_nitter(tweet)
         for link in links:
-            self.assert_text(link, '.main-tweet')
+            self.assert_text(link, Conversation.main)
     @parameterized.expand(username)
     def test_username(self, tweet, usernames):
@@ -123,22 +128,22 @@ class TweetTest(BaseTestCase):
             link = self.find_link_text(f'@{un}')
             self.assertIn(f'/{un}', link.get_property('href'))
-    # @parameterized.expand(retweet)
-    # def test_retweet(self, index, url, retweet_by, fullname, username, text):
-    #     self.open_nitter(url)
-    #     tweet = get_timeline_tweet(index)
-    #     self.assert_text(f'{retweet_by} retweeted', tweet.retweet)
-    #     self.assert_text(text, tweet.text)
-    #     self.assert_exact_text(fullname, tweet.fullname)
-    #     self.assert_exact_text(username, tweet.username)
+    @parameterized.expand(retweet)
+    def test_retweet(self, index, url, retweet_by, fullname, username, text):
+        self.open_nitter(url)
+        tweet = get_timeline_tweet(index)
+        self.assert_text(f'{retweet_by} retweeted', tweet.retweet)
+        self.assert_text(text, tweet.text)
+        self.assert_exact_text(fullname, tweet.fullname)
+        self.assert_exact_text(username, tweet.username)
     @parameterized.expand(invalid)
    def test_invalid_id(self, tweet):
         self.open_nitter(tweet)
         self.assert_text('Tweet not found', '.error-panel')
-    # @parameterized.expand(reply)
-    # def test_thread(self, tweet, num):
-    #     self.open_nitter(tweet)
-    #     thread = self.find_element(f'.timeline > div:nth-child({num})')
-    #     self.assertIn(thread.get_attribute('class'), 'thread-line')
+    #@parameterized.expand(reply)
+    #def test_thread(self, tweet, num):
+    #    self.open_nitter(tweet)
+    #    thread = self.find_element(f'.timeline > div:nth-child({num})')
+    #    self.assertIn(thread.get_attribute('class'), 'thread-line')