Created
March 6, 2026 22:03
-
-
Save justinabrahms/7400581a7154ef377fabf231d2def16e to your computer and use it in GitHub Desktop.
Jira: Fix Version → Done delta analysis for TM discovery components
This file contains hidden or bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
| #!/usr/bin/env python3 | |
| """ | |
| Analyze the time between when a Fix Version was added and when a Jira ticket | |
| was marked Done, for tickets in discovery components of the TM project. | |
| Requirements: | |
| - JIRA_API_TOKEN env var set | |
| - JIRA_EMAIL env var set | |
| - pip install requests | |
| Usage: | |
| export JIRA_API_TOKEN="your-token" | |
| export JIRA_EMAIL="you@thrivemarket.com" | |
| python3 fixversion-delta.py | |
| """ | |
| import os | |
| import sys | |
| import requests | |
| from datetime import datetime | |
# Jira Cloud instance and credentials (basic auth: email + API token).
JIRA_BASE = "https://thrivemarket.atlassian.net"
EMAIL = os.environ.get("JIRA_EMAIL")
TOKEN = os.environ.get("JIRA_API_TOKEN")
# Fail fast at import time if credentials are not configured.
if not TOKEN or not EMAIL:
    print("Error: Set JIRA_API_TOKEN and JIRA_EMAIL environment variables.", file=sys.stderr)
    sys.exit(1)
AUTH = (EMAIL, TOKEN)
# Component names (TM project) whose Done tickets we want to analyze.
DISCOVERY_COMPONENTS = [
    "Advanced Shopping Features (discovery)",
    "Core Shopping Funnel (discovery)",
    "Onboarding & Initial Engagement (discovery)",
    "Personalized Discovery & Search (discovery)",
]
# JQL: Done TM tickets in any discovery component that have a Fix Version set.
# Component names are double-quoted because they contain spaces/parentheses.
JQL = (
    'project = TM AND status = Done AND component in ('
    + ', '.join(f'"{c}"' for c in DISCOVERY_COMPONENTS)
    + ') AND fixVersion is not EMPTY ORDER BY updated DESC'
)
def fetch_issues(jql, max_results=50):
    """Search for issues using the POST-based /search/jql endpoint, paginated.

    Args:
        jql: JQL query string to execute.
        max_results: total number of issues to collect across all pages.

    Returns:
        A list of issue dicts (at most ``max_results`` entries), each with
        the summary/status/fixVersions/resolutiondate fields requested below.

    Raises:
        requests.HTTPError: if Jira returns a non-2xx response.
    """
    issues = []
    next_page_token = None
    while len(issues) < max_results:
        body = {
            "jql": jql,
            # The endpoint caps pages at 50; never request more than we need.
            "maxResults": min(50, max_results - len(issues)),
            "fields": ["summary", "status", "fixVersions", "resolutiondate"],
        }
        if next_page_token:
            body["nextPageToken"] = next_page_token
        # Explicit timeout: requests never times out by default, and a hung
        # connection would otherwise block the script forever.
        r = requests.post(
            f"{JIRA_BASE}/rest/api/3/search/jql",
            auth=AUTH,
            json=body,
            timeout=30,
        )
        r.raise_for_status()
        data = r.json()
        issues.extend(data.get("issues", []))
        next_page_token = data.get("nextPageToken")
        if not next_page_token:
            # No token means this was the last page of results.
            break
    return issues[:max_results]
def fetch_full_changelog(issue_key):
    """Fetch all changelog pages for a given issue.

    Args:
        issue_key: Jira issue key, e.g. ``"TM-123"``.

    Returns:
        A list of changelog history entries (each with ``created`` and
        ``items``), concatenated across all pages.

    Raises:
        requests.HTTPError: if Jira returns a non-2xx response.
    """
    histories = []
    start = 0
    while True:
        # Explicit timeout: requests never times out by default.
        r = requests.get(
            f"{JIRA_BASE}/rest/api/3/issue/{issue_key}/changelog",
            auth=AUTH,
            params={"startAt": start, "maxResults": 100},
            timeout=30,
        )
        r.raise_for_status()
        data = r.json()
        values = data.get("values", [])
        histories.extend(values)
        # Guard against an empty page: without this, an empty ``values``
        # with a nonzero ``total`` would leave ``start`` unchanged and
        # loop forever.
        if not values or start + len(values) >= data.get("total", 0):
            break
        start += len(values)
    return histories
def analyze():
    """Report the Fix Version → Done time delta for discovery tickets.

    Fetches Done TM tickets in the discovery components, walks each
    ticket's changelog to find when a Fix Version was first set and when
    the status last transitioned to Done, then prints a per-ticket table
    followed by min/max/median/average summary statistics.
    """
    print(f"JQL: {JQL}\n")
    issues = fetch_issues(JQL)
    print(f"Found {len(issues)} tickets.\n")
    if not issues:
        return
    results = []
    for issue in issues:
        key = issue["key"]
        fields = issue["fields"]
        summary = fields.get("summary", "")[:60]
        resolution_date = fields.get("resolutiondate")
        fix_versions = ", ".join(v["name"] for v in fields.get("fixVersions", []))
        histories = fetch_full_changelog(key)
        fix_version_added = None
        done_date = None
        for history in histories:
            created = history["created"]
            for item in history.get("items", []):
                # Earliest time any Fix Version value was set on the ticket.
                if item["field"] == "Fix Version" and item.get("toString"):
                    if fix_version_added is None:
                        fix_version_added = created
                # Latest transition into Done wins (handles re-opened tickets).
                if item["field"] == "status" and item.get("toString") == "Done":
                    done_date = created
        if done_date is None:
            # No Done transition visible in the changelog (e.g. migrated
            # tickets) — fall back to the resolution date field.
            done_date = resolution_date
        delta_str = "missing data"
        # Whole hours between FV-added and Done; None unless the delta is
        # meaningful (>= 1 hour). Stored numerically so the summary stats
        # below don't have to re-parse the human-readable display string.
        delta_hours = None
        if fix_version_added and done_date:
            try:
                # NOTE(review): Jira timestamps use a "+0000"-style offset,
                # which fromisoformat only accepts on Python 3.11+; on older
                # versions this lands in the except branch as a parse error.
                fv_dt = datetime.fromisoformat(fix_version_added)
                done_dt = datetime.fromisoformat(done_date)
                delta = done_dt - fv_dt
                total_hours = delta.total_seconds() / 3600
                if total_hours < 0:
                    # FV was added after Done — retroactive bookkeeping.
                    delta_str = "~0 (retroactive)"
                elif total_hours < 1:
                    delta_str = "~0 (same time)"
                else:
                    delta_str = f"{delta.days}d {delta.seconds // 3600}h"
                    delta_hours = delta.days * 24 + delta.seconds // 3600
            except Exception as e:
                delta_str = f"parse error: {e}"
        results.append({
            "key": key,
            "summary": summary,
            "fix_version": fix_versions,
            "fv_added": (fix_version_added or "")[:19],
            "done": (done_date or "")[:19],
            "delta": delta_str,
            "delta_hours": delta_hours,
        })
    # Print table
    print(f"{'Key':<12} {'Fix Version':<18} {'FV Added':<22} {'Done':<22} {'Delta':<20} {'Summary'}")
    print("-" * 140)
    for r in results:
        print(f"{r['key']:<12} {r['fix_version']:<18} {r['fv_added']:<22} {r['done']:<22} {r['delta']:<20} {r['summary']}")
    # Summary stats over the numeric deltas collected above (only tickets
    # with a meaningful, successfully parsed delta of >= 1 hour).
    deltas_hours = sorted(
        r["delta_hours"] for r in results if r["delta_hours"] is not None
    )
    if deltas_hours:
        median = deltas_hours[len(deltas_hours) // 2]
        avg = sum(deltas_hours) / len(deltas_hours)
        print(f"\nSummary ({len(deltas_hours)} tickets with meaningful deltas):")
        print(f"  Min: {min(deltas_hours) // 24}d {min(deltas_hours) % 24}h")
        print(f"  Max: {max(deltas_hours) // 24}d {max(deltas_hours) % 24}h")
        print(f"  Median: {median // 24}d {median % 24}h")
        print(f"  Avg: {avg / 24:.1f}d")
# Run the analysis only when executed as a script, not on import.
if __name__ == "__main__":
    analyze()
Sign up for free
to join this conversation on GitHub.
Already have an account?
Sign in to comment