Graph count of earthquakes over time
#!/usr/bin/env python3
# https://chatgpt.com/share/69485ed1-f5e8-800c-8396-70818473789c
"""
Weekly count of earthquakes M>=3.0 in Northern California for 2025.

Data source: USGS Earthquake Catalog (ComCat) FDSN Event Web Service.
https://earthquake.usgs.gov/fdsnws/event/1/

This script:
- Queries events in a lat/lon bounding box for 2025
- Filters to California-labeled events (place contains ", CA" or "California")
  and excludes "Nevada" (because the box touches the border)
- Bins events by week starting Monday
- Outputs:
  - norcal_earthquakes_m3_weekly_2025.csv
  - norcal_earthquakes_m3_weekly_2025.png
"""

from __future__ import annotations

import argparse
import csv
import json
import sys
import urllib.parse
import urllib.request
from dataclasses import dataclass
from datetime import datetime, timedelta, timezone, date
from typing import Any, Dict, List, Tuple

# Optional plotting (only needed if you want the PNG)
try:
    import matplotlib.pyplot as plt  # type: ignore
except Exception:
    plt = None

USGS_ENDPOINT = "https://earthquake.usgs.gov/fdsnws/event/1/query"


@dataclass
class Config:
    starttime: str = "2025-01-01"
    endtime: str = "2026-01-01"  # end is exclusive-ish; using Jan 1, 2026 captures all of 2025
    minmagnitude: float = 3.0
    # "Northern California" bounding box (edit these to taste)
    minlatitude: float = 37.0
    maxlatitude: float = 42.5
    minlongitude: float = -124.7
    maxlongitude: float = -117.8
    # Output
    csv_out: str = "norcal_earthquakes_m3_weekly_2025.csv"
    png_out: str = "norcal_earthquakes_m3_weekly_2025.png"
    title: str = "Earthquakes M≥3.0 in Northern California (by week) — 2025"
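# Note on the box above: latitude 37.0 to 42.5 and longitude -124.7 to -117.8
# spans roughly from the southern San Francisco Bay Area to just past the
# Oregon border, and it extends east of the California/Nevada state line.
# That spillover is why filter_ca() below drops Nevada-labeled events.
# (Rough geography, meant only as orientation.)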

def fetch_usgs_events(cfg: Config) -> Dict[str, Any]:
    params = {
        "format": "geojson",
        "starttime": cfg.starttime,
        "endtime": cfg.endtime,
        "minmagnitude": str(cfg.minmagnitude),
        "minlatitude": str(cfg.minlatitude),
        "maxlatitude": str(cfg.maxlatitude),
        "minlongitude": str(cfg.minlongitude),
        "maxlongitude": str(cfg.maxlongitude),
        "orderby": "time-asc",
    }
    url = USGS_ENDPOINT + "?" + urllib.parse.urlencode(params)
    req = urllib.request.Request(
        url,
        headers={
            # Friendly UA (USGS requests one)
            "User-Agent": "norcal-weekly-m3-script/1.0 (python urllib.request)",
            "Accept": "application/json",
        },
    )
    with urllib.request.urlopen(req, timeout=60) as resp:
        if resp.status != 200:
            raise RuntimeError(f"USGS request failed: HTTP {resp.status}")
        return json.loads(resp.read().decode("utf-8"))
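# For reference, the request above resolves to a URL of roughly this shape
# (values shown for the defaults in Config):
#   https://earthquake.usgs.gov/fdsnws/event/1/query?format=geojson
#     &starttime=2025-01-01&endtime=2026-01-01&minmagnitude=3.0
#     &minlatitude=37.0&maxlatitude=42.5&minlongitude=-124.7&maxlongitude=-117.8
#     &orderby=time-asc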

def week_start_monday(d: date) -> date:
    """Return the Monday date for the week containing d."""
    return d - timedelta(days=d.weekday())


def parse_events(data: Dict[str, Any]) -> List[Tuple[datetime, float, str]]:
    out: List[Tuple[datetime, float, str]] = []
    for feat in data.get("features", []):
        props = feat.get("properties", {}) or {}
        mag = props.get("mag", None)
        place = props.get("place", "") or ""
        t_ms = props.get("time", None)
        if mag is None or t_ms is None:
            continue
        # USGS time is epoch milliseconds UTC
        t = datetime.fromtimestamp(t_ms / 1000.0, tz=timezone.utc)
        out.append((t, float(mag), str(place)))
    return out
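# Each GeoJSON feature looks roughly like the following (values illustrative,
# not real events); only "mag", "place", and "time" are used above:
#   {"properties": {"mag": 3.4, "place": "8 km NW of Petrolia, CA",
#                   "time": 1735689600000, ...},
#    "geometry": {"type": "Point", "coordinates": [lon, lat, depth_km]}, ...}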

def filter_ca(events: List[Tuple[datetime, float, str]]) -> List[Tuple[datetime, float, str]]:
    """
    Keep events whose 'place' indicates California and exclude obvious Nevada spillover.
    This is a heuristic; adjust if you want a different definition.
    """
    kept = []
    for t, mag, place in events:
        place_lower = place.lower()
        is_ca = (", ca" in place_lower) or ("california" in place_lower)
        is_nv = "nevada" in place_lower
        if is_ca and not is_nv:
            kept.append((t, mag, place))
    return kept
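# How the heuristic above treats some typical place strings (examples made up
# to illustrate the string matching, not taken from the catalog):
#   "10 km NE of Willows, CA"       -> kept    (contains ", ca")
#   "offshore Northern California"  -> kept    (contains "california")
#   "15 km SW of Hawthorne, Nevada" -> dropped (contains "nevada")
#   "Mina, NV area"                 -> dropped (no ", ca" / "california" match)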

def bin_weekly(events: List[Tuple[datetime, float, str]], year: int = 2025) -> List[Tuple[date, int]]:
    """
    Return list of (week_start_date, count) covering all Monday weeks that intersect the year.
    """
    # Monday of the week containing Jan 1
    start = week_start_monday(date(year, 1, 1))
    # Monday of the week containing Dec 31
    end = week_start_monday(date(year, 12, 31))
    # Build the full sequence of Monday starts
    weeks: List[date] = []
    d = start
    while d <= end:
        weeks.append(d)
        d = d + timedelta(days=7)
    counts = {ws: 0 for ws in weeks}
    for t, _, _ in events:
        ws = week_start_monday(t.date())
        # Only count bins that are part of the weeks list
        if ws in counts:
            counts[ws] += 1
    return [(ws, counts[ws]) for ws in weeks]
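# Edge-bin note: for year=2025, Jan 1 is a Wednesday, so the first bin starts
# Monday 2024-12-30 and the last starts Monday 2025-12-29. Since the query
# window covers 2025 only, those edge bins effectively hold just the 2025
# portion of their weeks.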

def write_csv(rows: List[Tuple[date, int]], path: str) -> None:
    with open(path, "w", newline="", encoding="utf-8") as f:
        w = csv.writer(f)
        w.writerow(["week_start", "count"])
        for ws, c in rows:
            w.writerow([ws.isoformat(), c])


def plot_png(rows: List[Tuple[date, int]], path: str, title: str) -> None:
    if plt is None:
        raise RuntimeError("matplotlib is not installed; install it or run with --no-plot")
    xs = [datetime.combine(ws, datetime.min.time(), tzinfo=timezone.utc) for ws, _ in rows]
    ys = [c for _, c in rows]
    plt.figure(figsize=(12, 4))
    plt.plot(xs, ys)
    plt.title(title)
    plt.xlabel("Week start (Mon)")
    plt.ylabel("Count")
    plt.tight_layout()
    plt.savefig(path, dpi=200)


def main(argv: List[str]) -> int:
    ap = argparse.ArgumentParser()
    ap.add_argument("--minmag", type=float, default=3.0, help="Minimum magnitude (default 3.0)")
    ap.add_argument("--no-plot", action="store_true", help="Skip generating the PNG plot")
    ap.add_argument("--csv", default="norcal_earthquakes_m3_weekly_2025.csv", help="CSV output path")
    ap.add_argument("--png", default="norcal_earthquakes_m3_weekly_2025.png", help="PNG output path")
    ap.add_argument("--year", type=int, default=2025, help="Year to bin (default 2025)")
    args = ap.parse_args(argv)
    cfg = Config(
        minmagnitude=args.minmag,
        csv_out=args.csv,
        png_out=args.png,
        title=f"Earthquakes M≥{args.minmag:.1f} in Northern California (by week) — {args.year}",
        starttime=f"{args.year}-01-01",
        endtime=f"{args.year + 1}-01-01",
    )
    data = fetch_usgs_events(cfg)
    events = parse_events(data)
    events = filter_ca(events)
    weekly = bin_weekly(events, year=args.year)
    total = sum(c for _, c in weekly)
    peak_ws, peak_c = max(weekly, key=lambda x: x[1]) if weekly else (None, 0)
    write_csv(weekly, cfg.csv_out)
    if not args.no_plot:
        plot_png(weekly, cfg.png_out, cfg.title)
    print(f"Wrote CSV: {cfg.csv_out}")
    if not args.no_plot:
        print(f"Wrote PNG: {cfg.png_out}")
    print(f"Total events counted: {total}")
    if peak_ws is not None:
        print(f"Peak week start: {peak_ws.isoformat()} count={peak_c}")
    return 0


if __name__ == "__main__":
    raise SystemExit(main(sys.argv[1:]))
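
# Programmatic use, as a sketch (the module name "norcal_quakes" is a
# placeholder for whatever filename you save this gist under):
#   from norcal_quakes import Config, fetch_usgs_events, parse_events, filter_ca, bin_weekly
#   cfg = Config(minmagnitude=4.0, starttime="2024-01-01", endtime="2025-01-01")
#   weekly = bin_weekly(filter_ca(parse_events(fetch_usgs_events(cfg))), year=2024)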