GitHub Gists Downloader - Sync all gists for a user to a local directory (cross-platform)
#!/bin/bash
#
# download_gists.sh - GitHub Gists Downloader
# ============================================
#
# Fetches and syncs all gists for a GitHub user to a local directory.
# Only downloads files that are new or have been updated remotely.
#
# Features:
# - Cross-platform (macOS & Linux)
# - GitHub API rate limit awareness
# - Incremental updates (only downloads changed files)
# - Pagination support for users with many gists
#
# Prerequisites:
# - curl
# - jq
# - GitHub API token with 'gist' scope
#
# Usage:
# ./download_gists.sh <username> <bearer_token>
#
# Example:
# ./download_gists.sh bulletinmybeard ghp_xxxxxxxxxxxx
#
# License: MIT (c) 2024 bulletinmybeard
#
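# Exit immediately if any command in the script fails.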
set -e
# =============================================================================
# Help & Usage
# =============================================================================
show_help() {
    cat << 'EOF'
GitHub Gists Downloader
=======================

Downloads and syncs all gists for a specified GitHub user.

USAGE
    ./download_gists.sh <username> <bearer_token>
    ./download_gists.sh -h | --help

ARGUMENTS
    <username>       GitHub username whose gists to download
    <bearer_token>   GitHub API token with 'gist' scope

OPTIONS
    -h, --help       Show this help message and exit

PREREQUISITES
    - curl           For API requests and file downloads
    - jq             For JSON parsing

OUTPUT
    Files are saved to: ./exports/<username>/

EXAMPLES
    # Download all gists for user 'bulletinmybeard'
    ./download_gists.sh bulletinmybeard ghp_xxxxxxxxxxxx

    # Check remaining API requests without downloading
    # (just run the script - it shows rate limit info first)

RATE LIMITS
    GitHub API allows 5000 requests/hour for authenticated users.
    The script displays remaining requests before processing.

LICENSE
    MIT License (c) 2024 Robin Schulz
EOF
    exit 0
}
# =============================================================================
# Cross-Platform Detection
# =============================================================================
detect_platform() {
    case "$(uname -s)" in
        Darwin*)
            PLATFORM="macos"
            ;;
        Linux*)
            PLATFORM="linux"
            ;;
        *)
            echo "Error: Unsupported platform '$(uname -s)'"
            exit 1
            ;;
    esac
}
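# The helpers below wrap commands whose flags differ between the BSD userland
# on macOS and the GNU userland on Linux: base64, date, and stat.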
# Decode base64
decode_base64() {
    if [[ "$PLATFORM" == "macos" ]]; then
        base64 -D
    else
        base64 -d
    fi
}
# Convert Unix timestamp to human-readable date
timestamp_to_date() {
    local timestamp=$1
    if [[ "$PLATFORM" == "macos" ]]; then
        date -r "$timestamp" '+%F %T'
    else
        date -d "@$timestamp" '+%F %T'
    fi
}
# Parse ISO 8601 date to Unix timestamp
iso_to_timestamp() {
    local iso_date=$1
    if [[ "$PLATFORM" == "macos" ]]; then
        # -u so the trailing "Z" is treated as UTC rather than local time
        date -ju -f "%Y-%m-%dT%H:%M:%SZ" "$iso_date" +%s
    else
        date -d "$iso_date" +%s
    fi
}
# Get file modification time as Unix timestamp
file_mtime() {
    local file=$1
    if [[ "$PLATFORM" == "macos" ]]; then
        stat -f %m "$file"
    else
        stat -c %Y "$file"
    fi
}
# =============================================================================
# Argument Parsing
# =============================================================================
# Check for help flag
if [[ "$1" == "-h" ]] || [[ "$1" == "--help" ]]; then
show_help
fi
# Validate required arguments
if [[ -z "$1" ]] || [[ -z "$2" ]]; then
echo "Error: Missing required arguments."
echo ""
echo "Usage: $0 <username> <bearer_token>"
echo " $0 --help"
exit 1
fi
# Detect platform
detect_platform
username="${1}"
bearer_token="${2}"
# Create export directory
export_dir="./exports/${username}"
mkdir -p "$export_dir"
# =============================================================================
# GitHub API Functions
# =============================================================================
# cURL helper function
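# Sends the standard GitHub REST API headers: the JSON media type, the bearer
# token for authentication, and the pinned API version.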
run_curl_cmd() {
    local url=$1
    curl -sS \
        -H "Accept: application/vnd.github+json" \
        -H "Authorization: Bearer ${bearer_token}" \
        -H "X-GitHub-Api-Version: 2022-11-28" \
        "$url"
}
# Fetch rate limit info
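# (Calls to the /rate_limit endpoint do not count against the rate limit itself.)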
fetch_rate_limit() {
    local rate_limit_url="https://api.github.com/rate_limit"
    local response
    response=$(run_curl_cmd "$rate_limit_url")
    echo "$response" | jq '.rate'
}
# Calculate time until rate limit reset
calculate_time_until_reset() {
    local reset_timestamp=$1
    local current_timestamp
    current_timestamp=$(date +%s)
    local diff_seconds=$((reset_timestamp - current_timestamp))
    if [[ $diff_seconds -lt 0 ]]; then
        echo "Reset time has passed"
        return
    fi
    local hours minutes seconds
    hours=$(printf "%02d" $((diff_seconds / 3600)))
    minutes=$(printf "%02d" $(( (diff_seconds % 3600) / 60 )))
    seconds=$(printf "%02d" $(( diff_seconds % 60 )))
    echo "${hours}:${minutes}:${seconds}"
}
# =============================================================================
# Rate Limit Check
# =============================================================================
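# Abort up front if the quota is already exhausted; otherwise print how many
# requests remain and when the window resets.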
rate_limit_info=$(fetch_rate_limit)
request_limit=$(echo "$rate_limit_info" | jq '.limit')
remaining_requests=$(echo "$rate_limit_info" | jq '.remaining')
limit_reset=$(echo "$rate_limit_info" | jq '.reset')
limit_reset_datetime=$(timestamp_to_date "$limit_reset")
time_until_reset=$(calculate_time_until_reset "$limit_reset")
if [[ "$remaining_requests" -ne 0 ]]; then
printf "\n✓ API Rate Limit: %s/%s requests remaining\n" "$remaining_requests" "$request_limit"
printf " Reset at: %s (in %s)\n" "$limit_reset_datetime" "$time_until_reset"
printf "================================================\n"
else
echo "✗ API rate limit exceeded ($request_limit requests)."
echo " Resets in: $time_until_reset ($limit_reset_datetime)"
exit 1
fi
# =============================================================================
# Gist Processing
# =============================================================================
# Fetch and process paginated gists
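# Each page of results is flattened with jq into one {url, updated_at} object
# per file; each object is piped through @base64 so it survives the
# line-oriented while-read loop as a single token, then decoded again inside
# the loop. Prints "end" once an empty page signals that all pages are done.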
fetch_paginated_gists() {
    local page=$1
    local url="https://api.github.com/users/${username}/gists?page=${page}"
    local gists
    gists=$(run_curl_cmd "$url")
    local gist_count
    gist_count=$(echo "$gists" | jq '. | length')
    if [[ "$gist_count" -eq 0 ]]; then
        echo "end"
        return
    fi
    echo "$gists" | jq -r '.[] | {url: .files | to_entries[] | .value.raw_url, updated_at} | @base64' | \
    while IFS= read -r line; do
        # Decode the JSON string
        local json
        json=$(echo "$line" | decode_base64)
        # Extract URL and updated_at
        local url updated_at
        url=$(echo "$json" | jq -r '.url')
        updated_at=$(echo "$json" | jq -r '.updated_at')
        # Convert to Unix timestamp
        updated_at=$(iso_to_timestamp "$updated_at")
        # Extract filename and create path
        local filename file_path
        filename=$(basename "$url")
        file_path="${export_dir}/${filename}"
        # Check if file exists and compare timestamps
        if [[ -f "$file_path" ]]; then
            local local_mtime
            local_mtime=$(file_mtime "$file_path")
            if [[ "$local_mtime" -lt "$updated_at" ]]; then
                echo "  ↻ Updating: ${filename}"
                curl -sS -o "$file_path" "$url"
            else
                echo "  ✓ Up to date: ${filename}"
            fi
        else
            echo "  ↓ Downloading: ${filename}"
            curl -sS -o "$file_path" "$url"
        fi
    done
}
# =============================================================================
# Main Loop
# =============================================================================
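# Walk the pages one at a time until fetch_paginated_gists reports the "end"
# sentinel for an empty page.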
page=1
while true; do
    page_result=$(fetch_paginated_gists "$page")
    # Compare against "end" exactly; a substring match would false-positive on
    # filenames that merely contain "end".
    if [[ -n "$page_result" ]] && [[ "$page_result" != "end" ]]; then
        printf "\nPage %s\n-------\n" "$page"
        echo "$page_result"
        ((page++))
    else
        printf "\n✓ All gists processed! Files saved to: %s/\n" "$export_dir"
        break
    fi
done