Created
February 18, 2026 20:42
-
-
Save jhkchan/bce5b51419600f5dbb85379cff5ad37e to your computer and use it in GitHub Desktop.
Cursor Cleanup Script
This file contains hidden or bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
#!/bin/bash
# Cursor Cleanup Script
# Removes histories of non-existent projects/workspaces and compacts the database
#
# What this does:
#  1. Backs up state.vscdb before any changes
#  2. Removes non-existent paths from the recently opened list
#  3. Removes workspace storage directories for non-existent projects
#  4. Removes orphan workspace storage directories (no workspace.json or no folder)
#  5. Cleans up stale inline diff data in cursorDiskKV
#  6. Cleans up file edit history for non-existent paths
#  7. VACUUMs (compacts) the global state.vscdb and removes the stale backup
#
# IMPORTANT: Close Cursor completely before running this script!

set -euo pipefail

# --- Cursor data locations (macOS layout) ---
readonly CURSOR_DIR="$HOME/Library/Application Support/Cursor"
readonly USER_DIR="$CURSOR_DIR/User"
readonly GLOBAL_STORAGE="$USER_DIR/globalStorage"
readonly WS_STORAGE="$USER_DIR/workspaceStorage"
readonly HISTORY_DIR="$USER_DIR/History"
readonly STATE_DB="$GLOBAL_STORAGE/state.vscdb"
readonly STATE_DB_BACKUP="$GLOBAL_STORAGE/state.vscdb.backup"
# Timestamped backup destination outside the Cursor dir, so cleanup of
# $CURSOR_DIR can never touch it.
readonly BACKUP_DIR="$HOME/cursor-backup-$(date +%Y%m%d-%H%M%S)"

# --- ANSI colors for log output ---
readonly RED='\033[0;31m'
readonly GREEN='\033[0;32m'
readonly YELLOW='\033[1;33m'
readonly NC='\033[0m'

# Logging helpers: colorized, status-tagged one-liners.
log()  { echo -e "${GREEN}[OK]${NC} $1"; }
warn() { echo -e "${YELLOW}[WARN]${NC} $1"; }
err()  { echo -e "${RED}[ERROR]${NC} $1"; }
# --- Preflight: refuse to run while Cursor is open (its DBs would be live) ---
if pgrep -f "Cursor.app/Contents/MacOS/Cursor" > /dev/null 2>&1; then
  err "Cursor is still running! Please close Cursor completely first."
  err "You can force quit with: pkill -f 'Cursor.app/Contents/MacOS/Cursor'"
  exit 1
fi

# Refuse to continue if the global state DB is missing: every later step
# assumes it exists, and the 'cp' below would otherwise abort the script
# under 'set -e' with an unhelpful message.
if [ ! -f "$STATE_DB" ]; then
  err "Global state DB not found at: $STATE_DB"
  err "Is Cursor installed for this user account?"
  exit 1
fi

echo "========================================="
echo " Cursor Cleanup Script"
echo "========================================="
echo ""

# Show current sizes (du output is suppressed for paths that don't exist)
echo "Current sizes:"
echo "  Global state.vscdb: $(du -sh "$STATE_DB" 2>/dev/null | cut -f1)"
echo "  Global state.vscdb.backup: $(du -sh "$STATE_DB_BACKUP" 2>/dev/null | cut -f1)"
echo "  Workspace storage: $(du -sh "$WS_STORAGE" 2>/dev/null | cut -f1)"
echo "  File history: $(du -sh "$HISTORY_DIR" 2>/dev/null | cut -f1)"
echo "  Total Cursor dir: $(du -sh "$CURSOR_DIR" 2>/dev/null | cut -f1)"
echo ""

# Step 1: Back up the global DB so any mistake below is recoverable.
echo "Step 1: Creating backup..."
mkdir -p "$BACKUP_DIR"
cp "$STATE_DB" "$BACKUP_DIR/state.vscdb"
log "Backed up state.vscdb to $BACKUP_DIR/state.vscdb"
echo ""
# Step 2: Drop entries whose paths no longer exist from the "recently opened"
# list stored in the global state DB (key: history.recentlyOpenedPathsList).
echo "Step 2: Cleaning recently opened paths list..."
python3 << 'PYEOF'
import json, os, sqlite3, sys
from urllib.parse import unquote, urlparse

db_path = os.path.expanduser("~/Library/Application Support/Cursor/User/globalStorage/state.vscdb")
conn = sqlite3.connect(db_path)
try:
    row = conn.execute("SELECT value FROM ItemTable WHERE key='history.recentlyOpenedPathsList'").fetchone()
except sqlite3.OperationalError as exc:
    # e.g. ItemTable missing on an unexpected schema; skip gracefully rather
    # than exiting non-zero and killing the whole script (shell runs set -e).
    print(f"  Skipping: {exc}")
    conn.close()
    sys.exit(0)
if not row:
    print("  No recentlyOpenedPathsList found")
    conn.close()
    sys.exit(0)

data = json.loads(row[0])
entries = data.get('entries', [])
original_count = len(entries)
kept = []
removed = 0
for e in entries:
    # An entry refers to a folder, a single file, or a .code-workspace config.
    folder_uri = e.get('folderUri', '')
    file_uri = e.get('fileUri', '')
    ws_config = e.get('workspace', {}).get('configPath', '')
    uri = folder_uri or file_uri or ws_config
    if uri and uri.startswith('file://'):
        path = unquote(urlparse(uri).path)
        if not os.path.exists(path):
            removed += 1
            continue
    # Non-file:// URIs (e.g. remote workspaces) are kept untouched.
    kept.append(e)

data['entries'] = kept
conn.execute("UPDATE ItemTable SET value=? WHERE key='history.recentlyOpenedPathsList'", (json.dumps(data),))
conn.commit()
conn.close()
print(f"  Removed {removed} entries with non-existent paths (kept {len(kept)} of {original_count})")
PYEOF
log "Recently opened list cleaned"
echo ""
# Step 3: Remove workspace storage directories whose project folder is gone,
# plus orphan directories that have no workspace.json at all.
echo "Step 3: Removing workspace storage for non-existent projects..."
removed_ws=0
removed_ws_size=0
for dir in "$WS_STORAGE"/*/; do
  # If the glob matched nothing, "$dir" is the literal pattern; skip it.
  # (Otherwise 'du' prints nothing and the empty $size would make the
  # arithmetic below a syntax error that aborts the script under set -e.)
  [ -d "$dir" ] || continue
  ws_json="$dir/workspace.json"
  should_remove=false
  if [ -f "$ws_json" ]; then
    # Resolve the project folder from workspace.json (file:// URI or plain path).
    folder=$(python3 -c "
import json, sys
from urllib.parse import unquote, urlparse
d=json.load(open(sys.argv[1]))
f=d.get('folder','')
if f.startswith('file://'):
    print(unquote(urlparse(f).path))
elif f:
    print(f)
else:
    print('')
" "$ws_json" 2>/dev/null)
    # Empty $folder (multi-root workspace, unreadable json) => keep the dir.
    if [ -n "$folder" ] && [ ! -d "$folder" ]; then
      should_remove=true
    fi
  else
    # No workspace.json = orphan
    should_remove=true
  fi
  if [ "$should_remove" = true ]; then
    size=$(du -sk "$dir" 2>/dev/null | cut -f1)
    removed_ws_size=$((removed_ws_size + ${size:-0}))
    rm -rf -- "$dir"
    removed_ws=$((removed_ws + 1))
  fi
done
log "Removed $removed_ws workspace dirs ($(( removed_ws_size / 1024 )) MB)"
echo ""
# Step 4: Delete inlineDiff/inlineDiffs entries in cursorDiskKV that reference
# workspace IDs whose storage directory no longer exists (e.g. removed in Step 3).
echo "Step 4: Cleaning stale inline diff data from global state..."
python3 << 'PYEOF'
import os, sqlite3, sys

db_path = os.path.expanduser("~/Library/Application Support/Cursor/User/globalStorage/state.vscdb")
ws_base = os.path.expanduser("~/Library/Application Support/Cursor/User/workspaceStorage")

# Workspace IDs that still have a storage directory on disk.
existing_ws_ids = set()
if os.path.isdir(ws_base):
    existing_ws_ids = set(os.listdir(ws_base))

conn = sqlite3.connect(db_path)
try:
    rows = conn.execute("SELECT key FROM cursorDiskKV WHERE key LIKE 'inlineDiff:%' OR key LIKE 'inlineDiffs-%'").fetchall()
except sqlite3.OperationalError as exc:
    # cursorDiskKV may be absent on some schema versions; skip gracefully
    # instead of exiting non-zero and killing the script (shell runs set -e).
    print(f"  Skipping: {exc}")
    conn.close()
    sys.exit(0)

to_delete = []
for (key,) in rows:
    # Extract the workspace ID embedded in the key.
    # NOTE(review): assumes the ID is always a 32-char hex string immediately
    # after the prefix -- matches keys observed so far; re-verify on upgrades.
    if key.startswith('inlineDiff:'):
        ws_id = key[11:43]
    elif key.startswith('inlineDiffs-'):
        ws_id = key[12:44]
    else:
        continue
    if ws_id not in existing_ws_ids:
        to_delete.append(key)

if to_delete:
    conn.executemany("DELETE FROM cursorDiskKV WHERE key=?", [(k,) for k in to_delete])
    conn.commit()
conn.close()
print(f"  Removed {len(to_delete)} stale inline diff entries")
PYEOF
log "Stale inline diffs cleaned"
echo ""
# Step 5: Drop local-history directories whose tracked original file is gone.
echo "Step 5: Cleaning file history for non-existent files..."
if [ -d "$HISTORY_DIR" ]; then
  removed_hist=0
  removed_hist_size=0
  for hist_dir in "$HISTORY_DIR"/*/; do
    [ -d "$hist_dir" ] || continue
    entries_json="$hist_dir/entries.json"
    if [ -f "$entries_json" ]; then
      # Resolve the tracked file's original path from entries.json
      # ('resource' is a file:// URI or a plain path; empty on any error).
      orig_path=$(python3 -c "
import json, sys
from urllib.parse import unquote, urlparse
try:
    d=json.load(open(sys.argv[1]))
    r=d.get('resource','')
    if r.startswith('file://'):
        print(unquote(urlparse(r).path))
    else:
        print(r)
except Exception:
    print('')
" "$entries_json" 2>/dev/null)
      if [ -n "$orig_path" ] && [ ! -e "$orig_path" ]; then
        # ${size:-0}: if du produced no output the arithmetic would otherwise
        # be a syntax error that aborts the script under set -e.
        size=$(du -sk "$hist_dir" 2>/dev/null | cut -f1)
        removed_hist_size=$((removed_hist_size + ${size:-0}))
        rm -rf -- "$hist_dir"
        removed_hist=$((removed_hist + 1))
      fi
    fi
  done
  log "Removed $removed_hist file history dirs ($(( removed_hist_size / 1024 )) MB)"
else
  warn "No History directory found"
fi
echo ""
# Step 6: Remove Cursor's own stale state.vscdb.backup (a duplicate of the
# bloated DB; our fresh backup from Step 1 lives in $BACKUP_DIR).
echo "Step 6: Removing stale state.vscdb.backup..."
if [ -f "$STATE_DB_BACKUP" ]; then
  backup_size=$(du -sh "$STATE_DB_BACKUP" | cut -f1)
  rm -f -- "$STATE_DB_BACKUP"
  log "Removed state.vscdb.backup ($backup_size)"
else
  warn "No backup file found"
fi
echo ""

# Step 7: VACUUM reclaims the space freed by the deletions above.
# Guarded: 'du' on a missing file would abort under set -e, and sqlite3
# would silently create a new empty DB at that path.
echo "Step 7: Compacting (VACUUM) state.vscdb..."
if [ -f "$STATE_DB" ]; then
  before_size=$(du -sh "$STATE_DB" | cut -f1)
  sqlite3 "$STATE_DB" "VACUUM;"
  after_size=$(du -sh "$STATE_DB" | cut -f1)
  log "Compacted state.vscdb: $before_size -> $after_size"
else
  warn "state.vscdb not found; skipping VACUUM"
fi
echo ""

# Step 8: Also vacuum the per-workspace DBs that survived Step 3.
echo "Step 8: Compacting remaining workspace databases..."
compacted=0
for db in "$WS_STORAGE"/*/state.vscdb; do
  [ -f "$db" ] || continue
  # Failure here (locked/corrupt DB) is non-fatal: the && list keeps set -e
  # from aborting; we just don't count that DB.
  sqlite3 "$db" "VACUUM;" 2>/dev/null && compacted=$((compacted + 1))
done
log "Compacted $compacted workspace databases"
echo ""

# Final summary
echo "========================================="
echo " Cleanup Complete!"
echo "========================================="
echo ""
echo "Final sizes:"
echo "  Global state.vscdb: $(du -sh "$STATE_DB" 2>/dev/null | cut -f1)"
echo "  Workspace storage: $(du -sh "$WS_STORAGE" 2>/dev/null | cut -f1)"
echo "  File history: $(du -sh "$HISTORY_DIR" 2>/dev/null | cut -f1)"
echo "  Total Cursor dir: $(du -sh "$CURSOR_DIR" 2>/dev/null | cut -f1)"
echo ""
echo "Backup saved to: $BACKUP_DIR"
echo "You can delete the backup after verifying Cursor works correctly:"
echo "  rm -rf $BACKUP_DIR"
echo ""
echo "You can now reopen Cursor."
Author
Sign up for free
to join this conversation on GitHub.
Already have an account?
Sign in to comment
Tested on my Mac. Note: the script is macOS-specific — it assumes Cursor's data lives under `~/Library/Application Support/Cursor`.