import asyncio
import logging
import os

from dotenv import load_dotenv
from twscrape import API, gather, set_log_level

# Configuration — read environment once at import time.
load_dotenv()
OUTPUT_FILE = os.getenv("OUTPUT_FILE", "tweets.json")
SLEEP_MIN = 5   # seconds between requests (lower bound)
SLEEP_MAX = 10  # seconds between requests (upper bound)
LOG_LEVEL = os.getenv("LOG_LEVEL", "INFO").upper()

logging.basicConfig(level=LOG_LEVEL, format="%(asctime)s %(levelname)s %(message)s")
logger = logging.getLogger(__name__)


async def main() -> None:
    """Demo twscrape flow: add an account, log in, run a raw search, fetch a user.

    Raises:
        Whatever `twscrape` raises on failed login or failed API calls
        (propagated unhandled — this is a demo script).
    """
    api = API()  # or API("path-to.db") — default is `accounts.db`

    # ADD ACCOUNTS (for CLI usage see next readme section)
    # Option 1. Adding account with cookies (more stable than user/pass login).
    cookies = os.getenv("COOKIES")
    await api.pool.add_account("user3", "pass3", "u3@mail.com", "mail_pass3", cookies=cookies)
    await api.pool.login_all()  # try to login to receive account cookies

    # NOTE: all methods have a `raw` version returning the underlying
    # `httpx.Response` object instead of parsed models.
    async for rep in api.search_raw("elon musk"):
        print(rep.status_code, rep.json())  # rep is `httpx.Response` object

    # Change twscrape's log level (default: info).
    set_log_level("DEBUG")

    # Tweet & User models can be converted to a regular dict or JSON string.
    # USER_ID defaults to 2244994945 (@TwitterDev) so the demo is runnable as-is.
    user_id = int(os.getenv("USER_ID", "2244994945"))
    doc = await api.user_by_id(user_id)  # User model
    logger.info("user as dict: %s", doc.dict())  # -> python dict
    logger.info("user as json: %s", doc.json())  # -> json string


if __name__ == "__main__":
    # `main` is a coroutine — it must be driven by an event loop.
    # (Calling `main()` bare would only create an unawaited coroutine object.)
    asyncio.run(main())