|
#!/usr/bin/env nix-shell |
|
#!nix-shell -i python3 -p python3 python3Packages.requests python3Packages.openpyxl python3Packages.anthropic |
|
""" |
|
Numtide Contribution Report Generator |
|
Fetches GitHub, Harvest, and Linear data to create a monthly contribution report. |
|
|
|
Usage: |
|
python reporter.py [options] |
|
python reporter.py --help |
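
Example (illustrative values; the flags correspond to the options defined in parse_args):

    python reporter.py --github-username alice --github-orgs MyOrg \
        --start-date 2025-07-01 --end-date 2025-09-30 -o q3-report.xlsx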
|
""" |
|
|
|
import argparse |
|
import os |
|
from collections import defaultdict |
|
from datetime import datetime |
|
|
|
import anthropic |
|
import requests |
|
from openpyxl import Workbook |
|
from openpyxl.styles import Alignment, Border, Font, PatternFill, Side |
|
|
|
|
|
def parse_args(): |
|
parser = argparse.ArgumentParser( |
|
description="Generate a contribution report from GitHub, Harvest, and Linear data.", |
|
formatter_class=argparse.ArgumentDefaultsHelpFormatter, |
|
) |
|
|
|
# GitHub options |
|
parser.add_argument( |
|
"--github-username", |
|
default="aldoborrero", |
|
help="GitHub username to fetch contributions for", |
|
) |
|
parser.add_argument( |
|
"--github-orgs", |
|
nargs="+", |
|
default=["Arkiv-Network", "Golem-Base"], |
|
help="GitHub organizations to check", |
|
) |
|
parser.add_argument( |
|
"--github-token", |
|
default=os.environ.get("GITHUB_TOKEN", ""), |
|
help="GitHub API token (or set GITHUB_TOKEN env var)", |
|
) |
|
|
|
# Date range |
|
parser.add_argument( |
|
"--start-date", |
|
default="2025-07-01", |
|
help="Start date for the report (YYYY-MM-DD)", |
|
) |
|
parser.add_argument( |
|
"--end-date", |
|
default="2025-12-31", |
|
help="End date for the report (YYYY-MM-DD)", |
|
) |
|
|
|
# Harvest options |
|
parser.add_argument( |
|
"--harvest-account-id", |
|
default=os.environ.get("HARVEST_ACCOUNT_ID", ""), |
|
help="Harvest account ID (or set HARVEST_ACCOUNT_ID env var)", |
|
) |
|
parser.add_argument( |
|
"--harvest-token", |
|
default=os.environ.get("HARVEST_TOKEN", ""), |
|
help="Harvest API token (or set HARVEST_TOKEN env var)", |
|
) |
|
|
|
# Linear options |
|
parser.add_argument( |
|
"--linear-api-key", |
|
default=os.environ.get("LINEAR_API_KEY", ""), |
|
help="Linear API key (or set LINEAR_API_KEY env var)", |
|
) |
|
|
|
# Output |
|
parser.add_argument( |
|
"--output", |
|
"-o", |
|
default="report.xlsx", |
|
help="Output file path for the Excel report", |
|
) |
|
|
|
# AI Summary |
|
parser.add_argument( |
|
"--anthropic-api-key", |
|
default=os.environ.get("ANTHROPIC_API_KEY", ""), |
|
help="Anthropic API key for AI summaries (or set ANTHROPIC_API_KEY env var)", |
|
) |
|
|
|
return parser.parse_args() |
|
|
|
|
|
# ============ GITHUB FUNCTIONS ============ |
|
|
|
|
|
def get_github_headers(token): |
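    """Build request headers for the GitHub REST API, adding a bearer token when one is provided."""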
|
headers = {"Accept": "application/vnd.github+json"} |
|
if token: |
|
headers["Authorization"] = f"Bearer {token}" |
|
return headers |
|
|
|
|
|
def get_org_repos(org, token): |
|
"""Fetch all repositories in the organization.""" |
|
repos = [] |
|
page = 1 |
|
while True: |
|
url = f"https://api.github.com/orgs/{org}/repos?per_page=100&page={page}" |
|
resp = requests.get(url, headers=get_github_headers(token)) |
|
if resp.status_code != 200: |
|
print(f"Error fetching repos: {resp.status_code}") |
|
break |
|
data = resp.json() |
|
if not data: |
|
break |
|
repos.extend([r["name"] for r in data]) |
|
page += 1 |
|
return repos |
|
|
|
|
|
def get_user_commits(org, repo, username, since, until, token): |
|
"""Fetch commits by user in a repo within date range.""" |
|
commits = [] |
|
page = 1 |
|
while True: |
|
url = f"https://api.github.com/repos/{org}/{repo}/commits" |
|
params = { |
|
"author": username, |
|
"since": since, |
|
"until": until, |
|
"per_page": 100, |
|
"page": page, |
|
} |
|
resp = requests.get(url, headers=get_github_headers(token), params=params) |
|
if resp.status_code == 409: # Empty repo |
|
break |
|
if resp.status_code != 200: |
|
break |
|
data = resp.json() |
|
if not data: |
|
break |
|
for c in data: |
|
commits.append( |
|
{ |
|
"sha": c["sha"][:7], |
|
"message": c["commit"]["message"].split("\n")[0][:80], |
|
"date": c["commit"]["author"]["date"], |
|
"url": c["html_url"], |
|
} |
|
) |
|
page += 1 |
|
return commits |
|
|
|
|
|
def get_user_prs(org, repo, username, since, until, token): |
|
"""Fetch PRs by user in a repo within date range.""" |
|
prs = [] |
|
page = 1 |
|
since_dt = datetime.fromisoformat(since.replace("Z", "+00:00")) |
|
until_dt = datetime.fromisoformat(until.replace("Z", "+00:00")) |
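
    # The pulls list endpoint has no server-side date filter, so walk all pages
    # and keep only PRs created within the reporting window.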
|
|
|
while True: |
|
url = f"https://api.github.com/repos/{org}/{repo}/pulls" |
|
params = {"state": "all", "per_page": 100, "page": page} |
|
resp = requests.get(url, headers=get_github_headers(token), params=params) |
|
if resp.status_code != 200: |
|
break |
|
data = resp.json() |
|
if not data: |
|
break |
|
|
|
for pr in data: |
|
if pr["user"]["login"] != username: |
|
continue |
|
created = datetime.fromisoformat(pr["created_at"].replace("Z", "+00:00")) |
|
if since_dt <= created <= until_dt: |
|
prs.append( |
|
{ |
|
"number": pr["number"], |
|
"title": pr["title"][:80], |
|
"state": pr["state"], |
|
"created_at": pr["created_at"], |
|
"url": pr["html_url"], |
|
} |
|
) |
|
page += 1 |
|
return prs |
|
|
|
|
|
def fetch_all_github_contributions(args): |
|
"""Fetch all GitHub contributions and organize by month.""" |
|
print(f"Fetching GitHub contributions for {args.github_username}...") |
|
|
|
# Organize by month |
|
monthly_data = defaultdict( |
|
lambda: {"repos": set(), "commits": [], "prs": [], "orgs": set()} |
|
) |
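
    # GitHub's commits API takes ISO 8601 timestamps for its since/until parameters.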
|
|
|
since = f"{args.start_date}T00:00:00Z" |
|
until = f"{args.end_date}T23:59:59Z" |
|
|
|
for org in args.github_orgs: |
|
print(f"\n--- Organization: {org} ---") |
|
repos = get_org_repos(org, args.github_token) |
|
print(f"Found {len(repos)} repositories") |
|
|
|
for repo in repos: |
|
print(f" Checking {repo}...", end=" ") |
|
|
|
commits = get_user_commits( |
|
org, repo, args.github_username, since, until, args.github_token |
|
) |
|
prs = get_user_prs( |
|
org, repo, args.github_username, since, until, args.github_token |
|
) |
|
|
|
if commits or prs: |
|
print(f"{len(commits)} commits, {len(prs)} PRs") |
|
else: |
|
print("no contributions") |
|
continue |
|
|
|
full_repo = f"{org}/{repo}" |
|
|
|
for c in commits: |
|
month = c["date"][:7] # YYYY-MM |
|
monthly_data[month]["repos"].add(full_repo) |
|
monthly_data[month]["orgs"].add(org) |
|
monthly_data[month]["commits"].append( |
|
{**c, "repo": full_repo, "org": org} |
|
) |
|
|
|
for pr in prs: |
|
month = pr["created_at"][:7] |
|
monthly_data[month]["repos"].add(full_repo) |
|
monthly_data[month]["orgs"].add(org) |
|
monthly_data[month]["prs"].append({**pr, "repo": full_repo, "org": org}) |
|
|
|
return dict(monthly_data) |
|
|
|
|
|
# ============ HARVEST FUNCTIONS ============ |
|
|
|
|
|
def fetch_harvest_data(args): |
|
"""Fetch Harvest time entries (requires API token).""" |
|
if not args.harvest_token or not args.harvest_account_id: |
|
print("Harvest credentials not configured, skipping...") |
|
return {} |
|
|
|
print("Fetching Harvest time entries...") |
|
headers = { |
|
"Authorization": f"Bearer {args.harvest_token}", |
|
"Harvest-Account-Id": args.harvest_account_id, |
|
} |
|
|
|
# Get current user ID |
|
me_resp = requests.get("https://api.harvestapp.com/v2/users/me", headers=headers) |
|
if me_resp.status_code != 200: |
|
print(f"Harvest API error getting user: {me_resp.status_code}") |
|
return {} |
|
user_id = me_resp.json().get("id") |
|
|
|
monthly_data = defaultdict( |
|
lambda: {"total_hours": 0, "projects": defaultdict(float), "entries": []} |
|
) |
|
page = 1 |
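
    # Harvest paginates time entries; the response body carries next_page
    # while more pages remain.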
|
|
|
while True: |
|
url = "https://api.harvestapp.com/v2/time_entries" |
|
params = { |
|
"user_id": user_id, |
|
"from": args.start_date, |
|
"to": args.end_date, |
|
"per_page": 100, |
|
"page": page, |
|
} |
|
resp = requests.get(url, headers=headers, params=params) |
|
|
|
if resp.status_code != 200: |
|
print(f"Harvest API error: {resp.status_code}") |
|
break |
|
|
|
data = resp.json() |
|
for entry in data.get("time_entries", []): |
|
month = entry["spent_date"][:7] |
|
hours = entry["hours"] |
|
project = entry.get("project", {}).get("name", "Unknown") |
|
task = entry.get("task", {}).get("name", "") |
|
notes = entry.get("notes") or "" |
|
|
|
monthly_data[month]["total_hours"] += hours |
|
monthly_data[month]["projects"][project] += hours |
|
monthly_data[month]["entries"].append( |
|
{ |
|
"date": entry["spent_date"], |
|
"project": project, |
|
"task": task, |
|
"notes": notes, |
|
"hours": hours, |
|
} |
|
) |
|
|
|
if not data.get("next_page"): |
|
break |
|
page += 1 |
|
|
|
return dict(monthly_data) |
|
|
|
|
|
# ============ LINEAR FUNCTIONS ============ |
|
|
|
|
|
def fetch_linear_data(args): |
|
"""Fetch Linear issues (requires API key).""" |
|
if not args.linear_api_key: |
|
print("Linear API key not configured, skipping...") |
|
return {} |
|
|
|
print("Fetching Linear issues...") |
|
|
|
query = """ |
|
query($after: String) { |
|
viewer { |
|
assignedIssues( |
|
first: 100 |
|
after: $after |
|
filter: { completedAt: { gte: "%s", lte: "%s" } } |
|
) { |
|
pageInfo { hasNextPage endCursor } |
|
nodes { |
|
identifier |
|
title |
|
completedAt |
|
url |
|
team { name } |
|
} |
|
} |
|
} |
|
} |
|
""" % (args.start_date, args.end_date) |
|
|
|
headers = {"Authorization": args.linear_api_key, "Content-Type": "application/json"} |
|
monthly_issues = defaultdict(list) |
|
cursor = None |
|
|
|
while True: |
|
resp = requests.post( |
|
"https://api.linear.app/graphql", |
|
headers=headers, |
|
json={"query": query, "variables": {"after": cursor}}, |
|
) |
|
if resp.status_code != 200: |
|
print(f"Linear API error: {resp.status_code}") |
|
break |
|
|
|
data = resp.json().get("data", {}).get("viewer", {}).get("assignedIssues", {}) |
|
for issue in data.get("nodes", []): |
|
if issue.get("completedAt"): |
|
month = issue["completedAt"][:7] |
|
monthly_issues[month].append( |
|
{ |
|
"id": issue["identifier"], |
|
"title": issue["title"][:60], |
|
"team": issue.get("team", {}).get("name", ""), |
|
"url": issue["url"], |
|
} |
|
) |
|
|
|
page_info = data.get("pageInfo", {}) |
|
if not page_info.get("hasNextPage"): |
|
break |
|
cursor = page_info.get("endCursor") |
|
|
|
return dict(monthly_issues) |
|
|
|
|
|
# ============ REPORT GENERATION ============ |
|
|
|
|
|
def generate_ai_summary(github_data, harvest_data, linear_data, month, api_key): |
|
"""Generate an AI summary for a month using Claude.""" |
|
gh = github_data.get(month, {}) |
|
hv = harvest_data.get(month, {}) |
|
li = linear_data.get(month, []) |
|
|
|
# Build context for AI |
|
context_parts = [] |
|
|
|
# GitHub PRs |
|
prs = gh.get("prs", []) |
|
if prs: |
|
pr_list = "\n".join(f"- {pr['title']} ({pr['repo']})" for pr in prs) |
|
context_parts.append(f"Pull Requests:\n{pr_list}") |
|
|
|
# GitHub commits |
|
commits = gh.get("commits", []) |
|
if commits: |
|
commit_list = "\n".join(f"- {c['message']} ({c['repo']})" for c in commits[:20]) |
|
if len(commits) > 20: |
|
commit_list += f"\n- ... and {len(commits) - 20} more commits" |
|
context_parts.append(f"Commits:\n{commit_list}") |
|
|
|
# Harvest entries |
|
entries = hv.get("entries", []) |
|
if entries: |
|
harvest_list = "\n".join( |
|
f"- {e['project']}: {e['task']} - {e['notes']} ({e['hours']}h)" |
|
for e in entries |
|
if e.get("notes") |
|
) |
|
if harvest_list: |
|
context_parts.append(f"Time entries:\n{harvest_list}") |
|
total = hv.get("total_hours", 0) |
|
context_parts.append(f"Total hours logged: {total:.1f}h") |
|
|
|
# Linear issues |
|
if li: |
|
linear_list = "\n".join(f"- {issue['title']} ({issue['team']})" for issue in li) |
|
context_parts.append(f"Completed issues:\n{linear_list}") |
|
|
|
if not context_parts: |
|
return "No recorded contributions." |
|
|
|
context = "\n\n".join(context_parts) |
|
|
|
client = anthropic.Anthropic(api_key=api_key) |
|
message = client.messages.create( |
|
model="claude-sonnet-4-20250514", |
|
max_tokens=300, |
|
messages=[ |
|
{ |
|
"role": "user", |
|
"content": f"""Summarize this developer's work for {month} in 2-4 concise bullet points. |
|
Focus on the main achievements and themes. Be specific about what was built/fixed. |
|
Keep each bullet under 15 words. Use • for bullets. |
|
|
|
{context}""", |
|
} |
|
], |
|
) |
|
return message.content[0].text |
|
|
|
|
|
def generate_fallback_summary(github_data, harvest_data, linear_data, month): |
|
"""Generate a basic bullet-point summary for a month.""" |
|
lines = [] |
|
|
|
gh = github_data.get(month, {}) |
|
repos = gh.get("repos", set()) |
|
commits = gh.get("commits", []) |
|
prs = gh.get("prs", []) |
|
|
|
if repos: |
|
# Group repos by org |
|
org_repos = defaultdict(list) |
|
for repo in repos: |
|
org, name = repo.split("/", 1) |
|
org_repos[org].append(name) |
|
|
|
for org, repo_list in org_repos.items(): |
|
repo_names = ", ".join(sorted(repo_list)) |
|
lines.append(f"• {org}: {repo_names}") |
|
|
|
if commits or prs: |
|
stats = [] |
|
if commits: |
|
stats.append(f"{len(commits)} commits") |
|
if prs: |
|
stats.append(f"{len(prs)} PRs") |
|
lines.append(f" ({', '.join(stats)})") |
|
|
|
hv = harvest_data.get(month, {}) |
|
if hv.get("total_hours"): |
|
lines.append(f"• Harvest: {hv['total_hours']:.1f}h logged") |
|
|
|
li = linear_data.get(month, []) |
|
if li: |
|
lines.append(f"• Linear: {len(li)} issues completed") |
|
|
|
if not lines: |
|
return "No recorded contributions." |
|
|
|
return "\n".join(lines) |
|
|
|
|
|
def generate_summary(github_data, harvest_data, linear_data, month, args): |
|
"""Generate summary - uses AI if API key available, otherwise fallback.""" |
|
if args.anthropic_api_key: |
|
try: |
|
return generate_ai_summary( |
|
github_data, harvest_data, linear_data, month, args.anthropic_api_key |
|
) |
|
except Exception as e: |
|
print(f"AI summary failed for {month}: {e}") |
|
return generate_fallback_summary(github_data, harvest_data, linear_data, month) |
|
return generate_fallback_summary(github_data, harvest_data, linear_data, month) |
|
|
|
|
|
def create_excel_report(github_data, harvest_data, linear_data, args): |
|
"""Create the Excel report.""" |
|
wb = Workbook() |
|
|
|
# ===== Summary Sheet ===== |
|
ws = wb.active |
|
ws.title = "Summary" |
|
|
|
# Styles |
|
header_font = Font(bold=True, size=12) |
|
header_fill = PatternFill("solid", fgColor="4472C4") |
|
header_font_white = Font(bold=True, size=12, color="FFFFFF") |
|
thin_border = Border( |
|
left=Side(style="thin"), |
|
right=Side(style="thin"), |
|
top=Side(style="thin"), |
|
bottom=Side(style="thin"), |
|
) |
|
|
|
# Title |
|
ws.merge_cells("A1:E1") |
|
ws["A1"] = f"Contribution Report - {args.github_username}" |
|
ws["A1"].font = Font(bold=True, size=16) |
|
ws["A2"] = f"Organizations: {', '.join(args.github_orgs)}" |
|
ws["A3"] = f"Period: {args.start_date} to {args.end_date}" |
|
ws["A4"] = f"Generated: {datetime.now().strftime('%Y-%m-%d %H:%M')}" |
|
|
|
# Headers |
|
headers = ["Month", "Summary", "GitHub Repos", "Harvest Hours", "Linear Issues"] |
|
for col, header in enumerate(headers, 1): |
|
cell = ws.cell(row=6, column=col, value=header) |
|
cell.font = header_font_white |
|
cell.fill = header_fill |
|
cell.border = thin_border |
|
cell.alignment = Alignment(horizontal="center", wrap_text=True) |
|
|
|
# Column widths |
|
ws.column_dimensions["A"].width = 12 |
|
ws.column_dimensions["B"].width = 60 |
|
ws.column_dimensions["C"].width = 40 |
|
ws.column_dimensions["D"].width = 15 |
|
ws.column_dimensions["E"].width = 15 |
|
|
|
# Generate months from date range |
|
start_dt = datetime.strptime(args.start_date, "%Y-%m-%d") |
|
end_dt = datetime.strptime(args.end_date, "%Y-%m-%d") |
|
months = [] |
|
current = start_dt.replace(day=1) |
|
while current <= end_dt: |
|
months.append(current.strftime("%Y-%m")) |
|
# Move to next month |
|
if current.month == 12: |
|
current = current.replace(year=current.year + 1, month=1) |
|
else: |
|
current = current.replace(month=current.month + 1) |
|
|
|
row = 7 |
|
for month in months: |
|
month_dt = datetime.strptime(month, "%Y-%m") |
|
month_name = month_dt.strftime("%B %Y") |
|
gh = github_data.get(month, {}) |
|
hv = harvest_data.get(month, {}) |
|
li = linear_data.get(month, []) |
|
|
|
summary = generate_summary(github_data, harvest_data, linear_data, month, args) |
|
repos = ", ".join(sorted(gh.get("repos", set()))) |
|
hours = hv.get("total_hours", 0) |
|
issues = len(li) |
|
|
|
ws.cell(row=row, column=1, value=month_name).border = thin_border |
|
ws.cell(row=row, column=1).alignment = Alignment(vertical="top") |
|
ws.cell(row=row, column=2, value=summary).border = thin_border |
|
ws.cell(row=row, column=2).alignment = Alignment(wrap_text=True, vertical="top") |
|
ws.cell(row=row, column=3, value=repos).border = thin_border |
|
ws.cell(row=row, column=3).alignment = Alignment(wrap_text=True, vertical="top") |
|
ws.cell(row=row, column=4, value=hours if hours else "-").border = thin_border |
|
ws.cell(row=row, column=5, value=issues if issues else "-").border = thin_border |
|
|
|
ws.row_dimensions[row].height = 80 |
|
row += 1 |
|
|
|
# ===== GitHub Details Sheet ===== |
|
ws_gh = wb.create_sheet("GitHub Details") |
|
ws_gh["A1"] = "GitHub Contributions Detail" |
|
ws_gh["A1"].font = Font(bold=True, size=14) |
|
|
|
gh_headers = [ |
|
"Month", |
|
"Organization", |
|
"Repository", |
|
"Type", |
|
"Title/Message", |
|
"Link", |
|
] |
|
for col, header in enumerate(gh_headers, 1): |
|
cell = ws_gh.cell(row=3, column=col, value=header) |
|
cell.font = header_font_white |
|
cell.fill = header_fill |
|
|
|
ws_gh.column_dimensions["A"].width = 12 |
|
ws_gh.column_dimensions["B"].width = 18 |
|
ws_gh.column_dimensions["C"].width = 25 |
|
ws_gh.column_dimensions["D"].width = 10 |
|
ws_gh.column_dimensions["E"].width = 60 |
|
ws_gh.column_dimensions["F"].width = 50 |
|
|
|
row = 4 |
|
for month in months: |
|
gh = github_data.get(month, {}) |
|
for c in gh.get("commits", []): |
|
ws_gh.cell(row=row, column=1, value=month) |
|
ws_gh.cell(row=row, column=2, value=c.get("org", "")) |
|
ws_gh.cell(row=row, column=3, value=c["repo"].split("/")[-1]) |
|
ws_gh.cell(row=row, column=4, value="Commit") |
|
ws_gh.cell(row=row, column=5, value=c["message"]) |
|
ws_gh.cell(row=row, column=6, value=c["url"]) |
|
row += 1 |
|
for pr in gh.get("prs", []): |
|
ws_gh.cell(row=row, column=1, value=month) |
|
ws_gh.cell(row=row, column=2, value=pr.get("org", "")) |
|
ws_gh.cell(row=row, column=3, value=pr["repo"].split("/")[-1]) |
|
ws_gh.cell(row=row, column=4, value="PR") |
|
ws_gh.cell(row=row, column=5, value=f"#{pr['number']}: {pr['title']}") |
|
ws_gh.cell(row=row, column=6, value=pr["url"]) |
|
row += 1 |
|
|
|
# ===== Linear Details Sheet ===== |
|
ws_li = wb.create_sheet("Linear Details") |
|
ws_li["A1"] = "Linear Issues Detail" |
|
ws_li["A1"].font = Font(bold=True, size=14) |
|
|
|
li_headers = ["Month", "Team", "Issue ID", "Title", "Link"] |
|
for col, header in enumerate(li_headers, 1): |
|
cell = ws_li.cell(row=3, column=col, value=header) |
|
cell.font = header_font_white |
|
cell.fill = header_fill |
|
|
|
ws_li.column_dimensions["A"].width = 12 |
|
ws_li.column_dimensions["B"].width = 20 |
|
ws_li.column_dimensions["C"].width = 12 |
|
ws_li.column_dimensions["D"].width = 60 |
|
ws_li.column_dimensions["E"].width = 50 |
|
|
|
row = 4 |
|
for month in months: |
|
for issue in linear_data.get(month, []): |
|
ws_li.cell(row=row, column=1, value=month) |
|
ws_li.cell(row=row, column=2, value=issue.get("team", "")) |
|
ws_li.cell(row=row, column=3, value=issue["id"]) |
|
ws_li.cell(row=row, column=4, value=issue["title"]) |
|
ws_li.cell(row=row, column=5, value=issue["url"]) |
|
row += 1 |
|
|
|
# ===== Harvest Details Sheet ===== |
|
ws_hv = wb.create_sheet("Harvest Details") |
|
ws_hv["A1"] = "Harvest Time Entries Detail" |
|
ws_hv["A1"].font = Font(bold=True, size=14) |
|
|
|
hv_headers = ["Month", "Date", "Project", "Task", "Notes", "Hours"] |
|
for col, header in enumerate(hv_headers, 1): |
|
cell = ws_hv.cell(row=3, column=col, value=header) |
|
cell.font = header_font_white |
|
cell.fill = header_fill |
|
|
|
ws_hv.column_dimensions["A"].width = 12 |
|
ws_hv.column_dimensions["B"].width = 12 |
|
ws_hv.column_dimensions["C"].width = 25 |
|
ws_hv.column_dimensions["D"].width = 20 |
|
ws_hv.column_dimensions["E"].width = 50 |
|
ws_hv.column_dimensions["F"].width = 10 |
|
|
|
row = 4 |
|
for month in months: |
|
hv = harvest_data.get(month, {}) |
|
for entry in hv.get("entries", []): |
|
ws_hv.cell(row=row, column=1, value=month) |
|
ws_hv.cell(row=row, column=2, value=entry["date"]) |
|
ws_hv.cell(row=row, column=3, value=entry["project"]) |
|
ws_hv.cell(row=row, column=4, value=entry["task"]) |
|
ws_hv.cell(row=row, column=5, value=entry["notes"]) |
|
ws_hv.cell(row=row, column=6, value=entry["hours"]) |
|
row += 1 |
|
|
|
# Save |
|
wb.save(args.output) |
|
print(f"\nReport saved to: {args.output}") |
|
return args.output |
|
|
|
|
|
# ============ MAIN ============ |
|
|
|
|
|
def main(): |
|
args = parse_args() |
|
|
|
print("=" * 60) |
|
print("Contribution Report Generator") |
|
print(f"User: {args.github_username}") |
|
print(f"Organizations: {', '.join(args.github_orgs)}") |
|
print(f"Period: {args.start_date} to {args.end_date}") |
|
print("=" * 60) |
|
|
|
github_data = fetch_all_github_contributions(args) |
|
harvest_data = fetch_harvest_data(args) |
|
linear_data = fetch_linear_data(args) |
|
|
|
create_excel_report(github_data, harvest_data, linear_data, args) |
|
|
|
print("\n" + "=" * 60) |
|
print("NEXT STEPS:") |
|
print("1. Review the generated report") |
|
print("2. Edit summaries as needed") |
|
print("3. Add any missing data from Harvest/Linear exports") |
|
print("4. Upload to Google Drive for sharing") |
|
print("=" * 60) |
|
|
|
|
|
if __name__ == "__main__": |
|
main() |