зеркало из
https://github.com/viginum-datalab/twscrape.git
synced 2025-10-29 21:16:25 +02:00
add more format of cookies to parse
Этот коммит содержится в:
родитель
2c1c0cce97
Коммит
e87e4ea6da
@ -294,56 +294,3 @@ async def test_issue_28():
|
|||||||
assert doc.quotedTweet.id != doc.id
|
assert doc.quotedTweet.id != doc.id
|
||||||
check_tweet(doc.quotedTweet)
|
check_tweet(doc.quotedTweet)
|
||||||
assert doc.quotedTweet.viewCount is not None
|
assert doc.quotedTweet.viewCount is not None
|
||||||
|
|
||||||
|
|
||||||
async def main():
    """Fetch live API replies from Twitter and save them as mock fixture files.

    Requires at least one working account in the pool.  Each job pairs a
    target fixture name with a zero-argument callable producing either a
    coroutine or an async iterator; the first response obtained is written
    to disk so the test suite can replay it offline.
    """
    # prepare mock files from real twitter replies
    # you need to have some account to perform this
    FRESH = False  # set True to re-download fixtures that already exist

    pool = AccountsPool()
    api = API(pool)

    # NOTE(review): this span was reconstructed from a mangled diff scrape;
    # the f-string bodies below read "(unknown)" and look like placeholder
    # corruption from extraction — confirm against the upstream file.
    jobs = [
        (Files.search_raw, lambda: api.search_raw("elon musk lang:en", limit=20)),
        (Files.user_by_id_raw, lambda: api.user_by_id_raw(2244994945)),
        (Files.user_by_login_raw, lambda: api.user_by_login_raw("twitterdev")),
        (Files.tweet_details_raw, lambda: api.tweet_details_raw(1649191520250245121)),
        (Files.followers_raw, lambda: api.followers_raw(2244994945)),
        (Files.following_raw, lambda: api.following_raw(2244994945)),
        (Files.retweeters_raw, lambda: api.retweeters_raw(1649191520250245121)),
        (Files.favoriters_raw, lambda: api.favoriters_raw(1649191520250245121)),
        (Files.user_tweets_raw, lambda: api.user_tweets_raw(2244994945)),
        (Files.user_tweets_and_replies_raw, lambda: api.user_tweets_and_replies_raw(2244994945)),
    ]

    for filename, fn in jobs:
        filename = os.path.join(DATA_DIR, f"(unknown)")
        print("-" * 20)

        # Skip fixtures already on disk unless a fresh download was requested.
        if os.path.exists(filename) and FRESH is False:
            print(f"File (unknown) already exists")
            continue

        print(f"Getting data for (unknown)")

        rep = fn()
        # No __aiter__ attribute → fn returned a plain awaitable, not an
        # async iterator.
        is_coroutine = getattr(rep, "__aiter__", None) is None

        data = None
        if is_coroutine:
            data = await rep
        else:
            # For streaming endpoints keep only the first yielded response.
            async for x in rep:
                data = x
                break

        if data is None:
            print(f"Failed to get data for (unknown)")
            continue

        # Persist the raw response body as the mock fixture.
        with open(filename, "w") as fp:
            fp.write(data.text)


if __name__ == "__main__":
    asyncio.run(main())
|
|||||||
@ -78,7 +78,7 @@ async def migrate(db: aiosqlite.Connection):
|
|||||||
3: v3,
|
3: v3,
|
||||||
}
|
}
|
||||||
|
|
||||||
logger.debug(f"Current migration v{uv} (latest v{len(migrations)})")
|
# logger.debug(f"Current migration v{uv} (latest v{len(migrations)})")
|
||||||
for i in range(uv + 1, len(migrations) + 1):
|
for i in range(uv + 1, len(migrations) + 1):
|
||||||
logger.info(f"Running migration to v{i}")
|
logger.info(f"Running migration to v{i}")
|
||||||
try:
|
try:
|
||||||
|
|||||||
@ -193,6 +193,9 @@ def parse_cookies(val: str) -> dict[str, str]:
|
|||||||
try:
|
try:
|
||||||
try:
|
try:
|
||||||
res = json.loads(val)
|
res = json.loads(val)
|
||||||
|
if isinstance(res, dict) and "cookies" in res:
|
||||||
|
res = res["cookies"]
|
||||||
|
|
||||||
if isinstance(res, list):
|
if isinstance(res, list):
|
||||||
return {x["name"]: x["value"] for x in res}
|
return {x["name"]: x["value"] for x in res}
|
||||||
if isinstance(res, dict):
|
if isinstance(res, dict):
|
||||||
|
|||||||
Загрузка…
x
Ссылка в новой задаче
Block a user