Mirror of https://github.com/viginum-datalab/twscrape.git (synced 2025-10-29 05:04:22 +02:00)
Twitter GraphQL and Search API implementation with SNScrape data models.
Usage
import asyncio
from twapi import AccountsPool, API, gather
from twapi.logger import set_log_level
async def main():
    pool = AccountsPool()
    pool.add_account("user1", "pass1", "user1@example.com", "email_pass1")
    pool.add_account("user2", "pass2", "user2@example.com", "email_pass2")

    # log in all accounts if required (if no account file found)
    # session file will be saved to `accounts/{username}.json`
    await pool.login()

    api = API(pool)

    # search api
    await gather(api.search("elon musk", limit=20))  # list[Tweet]

    # graphql api
    tweet_id = 20
    user_id, user_login = 2244994945, "twitterdev"

    await api.tweet_details(tweet_id)  # Tweet
    await gather(api.retweeters(tweet_id, limit=20))  # list[User]
    await gather(api.favoriters(tweet_id, limit=20))  # list[User]

    await api.user_by_id(user_id)  # User
    await api.user_by_login(user_login)  # User
    await gather(api.followers(user_id, limit=20))  # list[User]
    await gather(api.following(user_id, limit=20))  # list[User]
    await gather(api.user_tweets(user_id, limit=20))  # list[Tweet]
    await gather(api.user_tweets_and_replies(user_id, limit=20))  # list[Tweet]

    # note 1: limit is optional, default is -1 (no limit)
    # note 2: all methods have a `raw` version, e.g.:

    async for tweet in api.search("elon musk"):
        print(tweet.id, tweet.user.username, tweet.rawContent)  # tweet is a `Tweet` object

    async for rep in api.search_raw("elon musk"):
        print(rep.status_code, rep.json())  # rep is an `httpx.Response` object

    # change log level (default is INFO)
    set_log_level("DEBUG")


if __name__ == "__main__":
    asyncio.run(main())
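
For reference, `gather` in the example above collects the items produced by an async generator (such as `api.search`) into a list. A minimal sketch of such a helper, written here as a hypothetical `gather_to_list` rather than the library's actual implementation, could look like this:

from typing import AsyncGenerator, TypeVar

T = TypeVar("T")

async def gather_to_list(gen: AsyncGenerator[T, None]) -> list[T]:
    # drain the async generator into a plain list
    items: list[T] = []
    async for item in gen:
        items.append(item)
    return items

# usage would mirror the example above:
# tweets = await gather_to_list(api.search("elon musk", limit=20))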