# main.py
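# Requires: pip install crawl4ai requests
# Assumes BROWSER_CASH_API_KEY is set in the environment.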
import os
import asyncio
import requests
from crawl4ai import AsyncWebCrawler, BrowserConfig


async def main():
    # 1) Create a Browser Cash session and get its CDP URL
    resp = requests.post(
        "https://api.browser.cash/v1/browser/session",
        headers={
            "Authorization": f"Bearer {os.getenv('BROWSER_CASH_API_KEY')}",
            "Content-Type": "application/json",
        },
        json={},
    )
    resp.raise_for_status()
    session = resp.json()
    cdp_url = session["cdpUrl"]

    try:
        # 2) Configure Crawl4AI to connect to the Browser Cash browser over CDP
        browser_config = BrowserConfig(
            cdp_url=cdp_url,
            use_managed_browser=True,  # Enable managed browser mode
        )

        # 3) Run the crawler with the Browser Cash browser
        async with AsyncWebCrawler(config=browser_config) as crawler:
            result = await crawler.arun(
                url="https://www.nbcnews.com/business",
            )
            print(result.markdown)
    finally:
        # 4) Stop the Browser Cash session, even if the crawl raised an error
        requests.delete(
            "https://api.browser.cash/v1/browser/session",
            headers={
                "Authorization": f"Bearer {os.getenv('BROWSER_CASH_API_KEY')}",
            },
            params={"sessionId": session["sessionId"]},
        )

if __name__ == "__main__":
asyncio.run(main())
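
# Example usage (assumes the API key is exported in your shell):
#   export BROWSER_CASH_API_KEY=...
#   python main.py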