Batch download of N hours of Himawari IR/VIS images
```bash
#!/usr/bin/env bash
#
# fetch_earth_images.sh
# Interactive batch download of Earth Wallpaper, IR & VIS images.
# -----------------------------------------------
set -euo pipefail

echo_heading() { echo -e "\n\033[34m$1\033[0m"; }
echo_success() { echo -e " \033[32m✔ $1\033[0m"; }
echo_warning() { echo -e " \033[33m⚠ $1\033[0m"; }
echo_failure() { echo -e "\033[31m✖ $1\033[0m"; }

usage() {
  cat <<EOF
Usage: $0 [ -c COUNT ] [ -o OUTPUT_DIR ] [ -y ]

Options:
  -c, --count COUNT   Number of hourly fetches (default: 24)
  -o, --output DIR    Output directory (supports ~/ and \$VAR; default: ./earth_images_<timestamp>)
  -y, --yes           Skip final confirmation
  -h, --help          Show this help

Always fetches: wallpaper, ir, and vis.
EOF
}

# ── Defaults & flag parsing ──────────────────────────────────────────────────
DEFAULT_COUNT=24
TS_SUFFIX=$(date -u +%Y%m%d_%H%M%SZ)
DEFAULT_OUTPUT="./earth_images_${TS_SUFFIX}"
SKIP_CONFIRM=0
FLAGS_PROVIDED=0

while (( "$#" )); do
  case "$1" in
    -c|--count)  COUNT=$2;      FLAGS_PROVIDED=1; shift 2 ;;
    -o|--output) OUTPUT_RAW=$2; FLAGS_PROVIDED=1; shift 2 ;;
    -y|--yes)    SKIP_CONFIRM=1; shift ;;
    -h|--help)   usage; exit 0 ;;
    --)          shift; break ;;
    -*)          echo_failure "Unknown option: $1"; usage; exit 1 ;;
    *)           break ;;
  esac
done

if [[ $FLAGS_PROVIDED -eq 0 ]]; then
  echo_heading "How many times to fetch the latest images? (once per hour)"
  echo "Default: $DEFAULT_COUNT"
  read -rp "> " COUNT
  COUNT=${COUNT:-$DEFAULT_COUNT}

  echo_heading "Output directory (supports ~/ and \$VAR)"
  echo "Default: $DEFAULT_OUTPUT"
  read -rp "> " OUTPUT_RAW
  OUTPUT_RAW=${OUTPUT_RAW:-$DEFAULT_OUTPUT}
else
  COUNT=${COUNT:-$DEFAULT_COUNT}
  OUTPUT_RAW=${OUTPUT_RAW:-$DEFAULT_OUTPUT}
fi

# ── Prepare output ───────────────────────────────────────────────────────────
OUTPUT_DIR=$(eval echo "$OUTPUT_RAW")
mkdir -p "$OUTPUT_DIR"
echo_success "Saving files to: $OUTPUT_DIR"

if [[ $SKIP_CONFIRM -eq 0 ]]; then
  echo_heading "Summary"
  echo "Count      : $COUNT"
  echo "Output dir : $OUTPUT_DIR"
  echo -n "Proceed? [Y/n] "
  read -rn1 CONF; echo
  [[ "$CONF" =~ ^[Nn]$ ]] && { echo_failure "Aborted."; exit 1; }
else
  echo_heading "Auto-confirm enabled; proceeding."
fi

# ── Base URLs ─────────────────────────────────────────────────────────────────
BASE_API_URL="https://agora.ex.nii.ac.jp/digital-typhoon"
WALLPAPER_GLOBE_URL="${BASE_API_URL}/wallpaper/globe"
LATEST_GLOBE_URL="${BASE_API_URL}/latest/globe"

# ── Always fetch these three types ────────────────────────────────────────────
TYPES=(wallpaper ir vis)

# ── Main loop ───────────────────────────────────────────────────────────────
for ((i=1; i<=COUNT; i++)); do
  NOW=$(date -u +%Y%m%d%H)
  echo_heading "[$i/$COUNT] Fetching at UTC $NOW"

  for t in "${TYPES[@]}"; do
    # choose sizes
    if [[ $t == "wallpaper" ]]; then
      SIZES=(1024x768)
    else
      SIZES=(512x512 2048x2048)
    fi

    for sz in "${SIZES[@]}"; do
      # build URL
      if [[ $t == "wallpaper" ]]; then
        IMAGE_URL="${WALLPAPER_GLOBE_URL}/${sz}/latest.jpg"
      else
        IMAGE_URL="${LATEST_GLOBE_URL}/${sz}/${t}.jpg"
      fi
      OUT_FILE="${OUTPUT_DIR}/${t}_${sz}_${NOW}.jpg"

      # skip exact duplicate
      if [[ -f "$OUT_FILE" ]]; then
        echo_warning " → ${t^^} $sz … already exists, skipping"
        continue
      fi
      echo -n " → ${t^^} $sz … "

      # find last frame for conditional GET
      LAST_FRAME=$(ls -1 "${OUTPUT_DIR}/${t}_${sz}_"*.jpg 2>/dev/null | tail -n1 || :)
      TMP_FILE=$(mktemp)
      if [[ -n "$LAST_FRAME" ]]; then
        curl -sSf -z "$LAST_FRAME" -o "$TMP_FILE" "$IMAGE_URL" || {
          echo_warning "failed"; rm -f "$TMP_FILE"; continue
        }
      else
        curl -sSf -o "$TMP_FILE" "$IMAGE_URL" || {
          echo_warning "failed"; rm -f "$TMP_FILE"; continue
        }
      fi

      if [[ -s "$TMP_FILE" ]]; then
        mv "$TMP_FILE" "$OUT_FILE"
        echo_success "$(basename "$OUT_FILE")"
      else
        echo_warning "no update"
        rm -f "$TMP_FILE"
      fi
    done
  done

  # sleep if more iterations remain
  if (( i < COUNT )); then
    echo -e "\nSleeping for 1 hour…"
    sleep 3600
  fi
done

echo_success "Done! Images saved under $OUTPUT_DIR."
```
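For example, to run six hourly rounds into a custom directory with no confirmation prompt:

```bash
./fetch_earth_images.sh -c 6 -o ~/Pictures/earth -y
```

Run with no flags, the script instead prompts interactively for the count and output directory.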
Duplicate-Avoidance Strategy (Earth-Image Fetch Script)
The script prevents re-downloading identical frames by combining two independent checks:
Layer 1 — Timestamped Filenames (Pre-Fetch Skip)
Examples: `ir_512x512_2025070615.jpg`, `wallpaper_1024x768_2025070615.jpg`.

Before any `curl` call, the script builds `OUT_FILE` for the current UTC hour; if that file already exists, the fetch is skipped entirely.
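The corresponding guard from the script's main loop (loop variables `t`, `sz`, and `NOW` in scope; the trailing comments are added here for illustration):

```bash
OUT_FILE="${OUTPUT_DIR}/${t}_${sz}_${NOW}.jpg"   # e.g. ir_512x512_2025070615.jpg

# skip exact duplicate
if [[ -f "$OUT_FILE" ]]; then
  echo_warning " → ${t^^} $sz … already exists, skipping"
  continue   # no network request is made for this frame
fi
```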
Layer 2 — HTTP Conditional GET (If-Modified-Since)

Before each download, the script locates the newest frame already on disk:

`LAST_FRAME=$(ls -1 "${OUTPUT_DIR}/${type}_${size}_"*.jpg | tail -n1)`

`curl -z <file>` explained:

- Reads `<file>`'s modification time.
- Sends the request header `If-Modified-Since: <that timestamp>`.
- The server compares this to its own `Last-Modified` time:
  - `304 Not Modified` → `curl` writes 0 bytes.
  - `200 OK` → the response body is the new JPEG.

Script guard: the script only keeps non-empty downloads (`[[ -s "$TMP_FILE" ]]`); a 0-byte response is reported as "no update" and discarded.
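The same pattern in isolation (a minimal sketch; the URL and file names here are placeholders, not part of the script):

```bash
#!/usr/bin/env bash
# Conditional-GET sketch: download only if the server copy is newer
# than the last saved frame. URL and file names are illustrative.
url="https://example.com/latest.jpg"
tmp=$(mktemp)

if [[ -f last.jpg ]]; then
  # -z sends If-Modified-Since based on last.jpg's mtime
  curl -sSf -z last.jpg -o "$tmp" "$url"
else
  curl -sSf -o "$tmp" "$url"
fi

if [[ -s "$tmp" ]]; then
  mv "$tmp" last.jpg        # 200 OK: a fresh image arrived
else
  rm -f "$tmp"              # 304 Not Modified: nothing new
  echo "no update"
fi
```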
Practical Flow
1. Does `${type}_${size}_${hour}.jpg` already exist?
   - Yes → skip completely.
2. Else, request with `-z LAST_FRAME`:
Server newer → save new JPEG.
This two-layer approach means no duplicate hourly frames accumulate on disk, and no bandwidth is wasted re-fetching an image the server has not updated.
Quick Checklist
- Use `curl` with `-z <last_local_file>`.
- Check `[[ -s "$TMP_FILE" ]]` before moving the download to its final location.

Copy these patterns whenever you need safe, incremental downloads.
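The same two layers can be packaged as a reusable helper. A minimal sketch; `fetch_if_newer` is a hypothetical name, not part of the original script:

```bash
# Hypothetical helper combining both layers of duplicate avoidance.
# Usage: fetch_if_newer URL OUT_FILE [LAST_FRAME]
fetch_if_newer() {
  local url=$1 out=$2 last=${3:-}

  # Layer 1: this exact frame is already on disk
  [[ -f "$out" ]] && return 0

  local tmp
  tmp=$(mktemp)
  if [[ -n "$last" && -f "$last" ]]; then
    # Layer 2: conditional GET against the newest previous frame
    curl -sSf -z "$last" -o "$tmp" "$url" || { rm -f "$tmp"; return 1; }
  else
    curl -sSf -o "$tmp" "$url" || { rm -f "$tmp"; return 1; }
  fi

  # Keep only non-empty responses; a 304 leaves tmp empty
  if [[ -s "$tmp" ]]; then mv "$tmp" "$out"; else rm -f "$tmp"; fi
}
```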