Script to list, describe, and delete Honeycomb datasets
#!/usr/bin/env python3
# /// script
# requires-python = ">=3.13"
# dependencies = [
#     "requests<3",
#     "click>=8.0.0",
#     "rich>=13.0.0",
# ]
# ///
"""
Honeycomb Dataset Management CLI

A tool to manage Honeycomb datasets with commands to list, describe, and delete datasets.

Environment variables:
    HONEYCOMB_API_KEY: Your Honeycomb API key
"""
import os
import sys
import fnmatch
import requests
import json
import time
from typing import List, Dict, Any, Optional

import click
from rich.console import Console
from rich.table import Table
from rich.panel import Panel
from rich.markdown import Markdown
from rich import box

# API endpoints
HONEYCOMB_API_BASE = "https://api.honeycomb.io"
DATASETS_ENDPOINT = f"{HONEYCOMB_API_BASE}/1/datasets"

# Initialize Rich console
console = Console()


def get_api_key() -> str:
    """Get the Honeycomb API key from environment variables."""
    api_key = os.environ.get("HONEYCOMB_API_KEY")
    if not api_key:
        console.print("[red]Error: HONEYCOMB_API_KEY environment variable is not set.[/red]")
        sys.exit(1)
    return api_key


def get_headers(api_key: str) -> Dict[str, str]:
    """Create headers for Honeycomb API requests."""
    return {
        "X-Honeycomb-Team": api_key,
        "Content-Type": "application/json"
    }


def list_datasets(api_key: str) -> List[Dict[str, Any]]:
    """List all datasets in the Honeycomb team."""
    headers = get_headers(api_key)
    response = requests.get(DATASETS_ENDPOINT, headers=headers)
    if response.status_code != 200:
        console.print(f"[red]Error listing datasets: {response.status_code} - {response.text}[/red]")
        sys.exit(1)
    return response.json()


def get_dataset(api_key: str, dataset_slug: str) -> Optional[Dict[str, Any]]:
    """Get detailed information about a specific dataset."""
    headers = get_headers(api_key)
    url = f"{DATASETS_ENDPOINT}/{dataset_slug}"
    response = requests.get(url, headers=headers)
    if response.status_code != 200:
        console.print(f"[red]Error getting dataset {dataset_slug}: {response.status_code} - {response.text}[/red]")
        return None
    return response.json()


def update_dataset_protection(api_key: str, dataset_slug: str, protected: bool) -> bool:
    """Update the delete protection setting for a dataset."""
    headers = get_headers(api_key)
    url = f"{DATASETS_ENDPOINT}/{dataset_slug}"

    # First, get the current dataset settings
    dataset = get_dataset(api_key, dataset_slug)
    if not dataset:
        return False

    # Prepare the update payload
    payload = {
        "name": dataset.get("name", ""),
        "description": dataset.get("description", ""),
        "settings": {
            "delete_protected": protected
        }
    }

    # Update the dataset
    response = requests.put(url, headers=headers, json=payload)
    if response.status_code != 200:
        console.print(f"[red]Error updating dataset {dataset_slug}: {response.status_code} - {response.text}[/red]")
        return False
    return True


def delete_dataset(api_key: str, dataset_slug: str) -> bool:
    """Delete a dataset."""
    headers = get_headers(api_key)
    url = f"{DATASETS_ENDPOINT}/{dataset_slug}"
    response = requests.delete(url, headers=headers)
    if response.status_code not in [202, 204]:
        console.print(f"[red]Error deleting dataset {dataset_slug}: {response.status_code} - {response.text}[/red]")
        return False
    return True


def filter_datasets_by_glob(datasets: List[Dict[str, Any]], glob_pattern: str) -> List[Dict[str, Any]]:
    """Filter datasets by a glob pattern matching their names."""
    matching_datasets = []
    for dataset in datasets:
        name = dataset.get("name", "")
        slug = dataset.get("slug", "")
        if fnmatch.fnmatch(name, glob_pattern) or fnmatch.fnmatch(slug, glob_pattern):
            matching_datasets.append(dataset)
    return matching_datasets


def display_datasets_table(datasets: List[Dict[str, Any]], show_details: bool = False) -> None:
    """Display datasets in a rich table format."""
    table = Table(
        show_header=True,
        header_style="bold",
        box=box.SIMPLE,
        title="Honeycomb Datasets",
        title_style="bold"
    )

    # Add columns
    table.add_column("Name")
    table.add_column("Slug", style="dim")
    table.add_column("Last Written")
    table.add_column("Fields", justify="right")
    if show_details:
        table.add_column("Description")
        table.add_column("Protected", justify="center")

    # Add rows
    for dataset in datasets:
        row = [
            dataset.get("name", ""),
            dataset.get("slug", ""),
            dataset.get("last_written_at", "Never") or "Never",
            str(dataset.get("regular_columns_count", 0) or 0),
        ]
        if show_details:
            row.extend([
                dataset.get("description", ""),
                "●" if dataset.get("settings", {}).get("delete_protected", False) else "○"
            ])
        table.add_row(*row)

    console.print()
    console.print(table)
    console.print(f"\nTotal datasets: {len(datasets)}")


def display_dataset_details(dataset: Dict[str, Any]) -> None:
    """Display detailed information about a dataset."""
    panel = Panel(
        "\n".join([
            f"Name: {dataset.get('name')}",
            f"Slug: {dataset.get('slug')}",
            f"Last Written: {dataset.get('last_written_at') or 'Never'}",
            f"Number of Fields: {dataset.get('regular_columns_count', 0) or 0}",
            f"Description: {dataset.get('description') or 'No description'}",
            f"Delete Protected: {'Yes' if dataset.get('settings', {}).get('delete_protected', False) else 'No'}",
            f"Created At: {dataset.get('created_at')}"
        ]),
        title="Dataset Details",
        title_align="left",
        border_style="dim",
        padding=(1, 2)
    )
    console.print()
    console.print(panel)


@click.group()
def cli():
    """Honeycomb Dataset Management CLI"""
    pass


@cli.command()
@click.option('--pattern', '-p', help='Filter datasets by name/slug pattern (glob syntax)')
@click.option('--details', '-d', is_flag=True, help='Show additional details')
def list(pattern: Optional[str], details: bool):
    """List all datasets with optional filtering"""
    api_key = get_api_key()
    with console.status("Fetching datasets..."):
        datasets = list_datasets(api_key)

    if pattern:
        datasets = filter_datasets_by_glob(datasets, pattern)
        if not datasets:
            console.print(f"\n[yellow]No datasets found matching pattern: {pattern}[/yellow]")
            return

    display_datasets_table(datasets, show_details=details)


@cli.command()
@click.argument('dataset-slug')
def describe(dataset_slug: str):
    """Show detailed information about a specific dataset"""
    api_key = get_api_key()
    with console.status(f"Fetching details for dataset: {dataset_slug}..."):
        dataset = get_dataset(api_key, dataset_slug)

    if dataset:
        display_dataset_details(dataset)
    else:
        console.print(f"\n[red]Dataset not found: {dataset_slug}[/red]")


@cli.command()
@click.argument('pattern')
@click.option('--force', '-f', is_flag=True, help='Skip confirmation prompt')
def delete(pattern: str, force: bool):
    """Delete datasets matching a pattern (glob syntax)"""
    api_key = get_api_key()

    # First phase: Fetch and filter datasets
    with console.status("Working...") as status:
        status.update("Fetching datasets...")
        all_datasets = list_datasets(api_key)
        status.update(f"Filtering datasets by pattern: '{pattern}'")
        matching_datasets = filter_datasets_by_glob(all_datasets, pattern)

    if not matching_datasets:
        console.print(f"\nNo datasets found matching pattern: '{pattern}'")
        return

    # Show matching datasets and get confirmation
    console.print("\nThe following datasets will be deleted:")
    display_datasets_table(matching_datasets, show_details=True)

    # Confirm deletion outside of any status display
    if not force and not click.confirm("\nAre you sure you want to delete these datasets? This action cannot be undone"):
        console.print("\nOperation cancelled.")
        return

    # Second phase: Process deletions
    success_count = 0
    failure_count = 0
    with console.status("Deleting datasets...") as status:
        for dataset in matching_datasets:
            dataset_slug = dataset.get("slug")
            dataset_name = dataset.get("name")
            status.update(f"Processing dataset: {dataset_name}")

            # Check if the dataset is delete protected
            is_protected = dataset.get("settings", {}).get("delete_protected", False)
            if is_protected:
                status.update(f"Disabling delete protection for {dataset_name}...")
                if not update_dataset_protection(api_key, dataset_slug, False):
                    console.print(f"Error: Failed to disable delete protection for {dataset_name}")
                    failure_count += 1
                    continue
                # Wait a moment for the update to take effect
                time.sleep(1)

            # Delete the dataset
            if delete_dataset(api_key, dataset_slug):
                console.print(f"Successfully deleted dataset: {dataset_name}")
                success_count += 1
            else:
                console.print(f"Error: Failed to delete dataset: {dataset_name}")
                failure_count += 1

    # Show summary
    console.print(f"\nOperation complete:")
    console.print(f" • {success_count} datasets deleted successfully")
    if failure_count > 0:
        console.print(f" • {failure_count} operations failed")


if __name__ == "__main__":
    cli()