Files
Monkeycrawl/main.py
fzzinchemical a246279426 Add environment configuration and update main functionality
- Introduced .env.local for environment variables
- Updated .gitignore to include new files
- Enhanced main.py to utilize environment variables for user credentials
- Implemented random sleep delay in API calls and output JSON formatting
2025-11-13 19:56:28 +01:00

49 lines
1.5 KiB
Python
Raw Blame History

This file contains ambiguous Unicode characters
This file contains Unicode characters that might be confused with other characters. If you think that this is intentional, you can safely ignore this warning. Use the Escape button to reveal them.
import asyncio
import random
from twscrape import API
import os
import json
import logging
from dotenv import load_dotenv
# Configuration
# Load environment variables from a local .env file before any os.getenv call.
load_dotenv()
# Destination file for the crawl output (overridable via the OUTPUT_FILE env var).
OUTPUT_FILE = os.getenv("OUTPUT_FILE", "tweets.json")
# Module-level logger with timestamped INFO-level output.
logging.basicConfig(level=logging.INFO, format="%(asctime)s %(levelname)s %(message)s")
logger = logging.getLogger(__name__)
# NOTE(review): COOKIES is re-read inside main(); this module-level copy looks
# unused — confirm no external importer relies on it before removing.
COOKIES = os.getenv("COOKIES")
async def main():
    """Log one twscrape account in from env-var credentials, search for "AI",
    and dump every collected response to OUTPUT_FILE as JSON.

    Reads USERNAME, PASSWORD, EMAIL, EMAIL_PASSWORD and COOKIES from the
    environment (a .env file is loaded at module import). Writes a single
    JSON object {"tweets": [...]} once the crawl finishes.
    """
    api = API()  # or API("path-to.db"); default is `accounts.db`

    # Credentials come from the environment; .env was loaded at module import,
    # so the redundant in-function load_dotenv() call was dropped.
    username = os.getenv("USERNAME")
    password = os.getenv("PASSWORD")
    email = os.getenv("EMAIL")
    email_password = os.getenv("EMAIL_PASSWORD")
    cookies = os.getenv("COOKIES")
    if not username:
        # Fail fast with a clear message instead of an obscure add_account error.
        logger.error("USERNAME is not set in the environment; aborting.")
        return

    # Adding an account with cookies is more stable than a password login.
    await api.pool.add_account(username, password, email, email_password, cookies=cookies)
    await api.pool.login_all()  # try to login to receive account cookies

    # BUG FIX: the accumulator is initialized ONCE before the loop. The original
    # re-created `_results = []` on every iteration, so only the last response
    # survived to the final dump.
    results = []
    # NOTE: all methods also have a `raw` variant returning `httpx.Response`.
    async for rep in api.search("AI"):
        try:
            results.append(rep.json())
        except Exception:
            # Best-effort fallback: keep the raw text, but record why.
            logger.exception("Could not decode response as JSON; storing raw text")
            results.append(rep.text)
        # Random delay between 7 and 15 seconds to avoid hammering the API.
        await asyncio.sleep(random.uniform(7, 15))

    # Write the output once, after the crawl completes (the original wrote
    # inside the loop, clobbering the file every iteration).
    with open(OUTPUT_FILE, "w", encoding="utf-8") as f:
        json.dump({"tweets": results}, f, ensure_ascii=False, indent=2)
    logger.info("Wrote %d result(s) to %s", len(results), OUTPUT_FILE)


if __name__ == "__main__":
    asyncio.run(main())