"""Scrape tweets matching a search query with twscrape and dump them to a JSON file.

Credentials and the output path are read from the environment (via a .env file):
USERNAME, PASSWORD, EMAIL, EMAIL_PASSWORD, COOKIES, OUTPUT_FILE.
"""

import asyncio
import json
import logging
import os
import random

from dotenv import load_dotenv
from twscrape import API

# Configuration: pull credentials and output path from the environment (.env file).
load_dotenv()
OUTPUT_FILE = os.getenv("OUTPUT_FILE", "tweets.json")

logging.basicConfig(level=logging.INFO, format="%(asctime)s %(levelname)s %(message)s")
logger = logging.getLogger(__name__)

COOKIES = os.getenv("COOKIES")


async def main() -> None:
    """Log in a twscrape account, search for tweets, and write them to OUTPUT_FILE."""
    api = API()  # or API("path-to.db") – default is `accounts.db`

    # ADD ACCOUNTS (for CLI usage see next readme section)
    # Option 1. Adding account with cookies (more stable).
    # NOTE: load_dotenv() already ran at module import; no need to call it again here.
    username = os.getenv("USERNAME")
    password = os.getenv("PASSWORD")
    email = os.getenv("EMAIL")
    email_password = os.getenv("EMAIL_PASSWORD")
    await api.pool.add_account(username, password, email, email_password, cookies=COOKIES)
    await api.pool.login_all()  # try to login to receive account cookies

    # BUG FIX: the original re-created the results list inside the loop, so only
    # the last search result was ever written out. Accumulate across iterations.
    results = []
    # NOTE 2: all methods have a `raw` version (returns `httpx.Response` object).
    async for rep in api.search("AI"):
        try:
            results.append(rep.json())
        except Exception:
            # Payload could not be decoded as JSON — log it and keep the raw text
            # instead of silently swallowing the error.
            logger.exception("Failed to decode response as JSON; storing raw text")
            results.append(rep.text)
        # Random delay between 7 and 15 seconds to avoid hammering the endpoint.
        await asyncio.sleep(random.uniform(7, 15))

    with open(OUTPUT_FILE, "w", encoding="utf-8") as f:
        json.dump({"tweets": results}, f, ensure_ascii=False, indent=2)
    logger.info("Wrote %d results to %s", len(results), OUTPUT_FILE)


if __name__ == "__main__":
    asyncio.run(main())