Skip to content

Instantly share code, notes, and snippets.

@cmj
Created December 5, 2025 15:42
Show Gist options
  • Select an option

  • Save cmj/8c8a2f621016a132044ebe71f6a8268f to your computer and use it in GitHub Desktop.

Select an option

Save cmj/8c8a2f621016a132044ebe71f6a8268f to your computer and use it in GitHub Desktop.
Pretty print downdetector.com
#!/usr/bin/python3
"""Pretty-print the top outage reports from downdetector.com.

Uses cloudscraper (with stealth options) to get past the Cloudflare
challenge, then prints up to 15 companies with the number of outage
reports within the last hour, bullet-separated on one line, e.g.:

    Steam(123) • Discord(45) • ...
"""
import requests
import cloudscraper
from bs4 import BeautifulSoup

# Human-like delays / randomized headers reduce the chance of the
# anti-bot layer rejecting the request.
scraper = cloudscraper.create_scraper(
    interpreter='js2py',
    delay=5,
    enable_stealth=True,
    stealth_options={
        'min_delay': 2.0,
        'max_delay': 6.0,
        'human_like_delays': True,
        'randomize_headers': True,
        'browser_quirks': True,
    },
)

soup = BeautifulSoup(scraper.get("https://downdetector.com/").content, "html.parser")
comp = soup.find(class_='companies')
sites = comp.find_all('div', {'class': 'company-index'})

parts = []
for info in sites[:15]:
    name = info.find('h5')
    reports = info.get('data-hour')  # switch to 'data-day' for last-day counts
    # Skip malformed entries (missing <h5> or missing attribute) instead
    # of crashing the whole report on a None concatenation.
    if name is None or reports is None:
        continue
    parts.append(name.get_text(strip=True) + "(" + reports + ")")

# join() handles the separator cleanly -- no trailing-delimiter trim needed.
print(" • ".join(parts))
Sign up for free to join this conversation on GitHub. Already have an account? Sign in to comment