Last active
March 8, 2026 21:28
-
-
Save DuaneNielsen/422ae928f49afffc85def4beb4c1a0bb to your computer and use it in GitHub Desktop.
GPU RGB monitor - maps NVIDIA GPU utilization to RGB colors via OpenRGB
This file contains hidden or bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
| #!/usr/bin/env python3 | |
| """GPU RGB monitor - maps GPU utilization to cool-to-hot RGB colors via OpenRGB CLI.""" | |
| # Gist: https://gist.github.com/DuaneNielsen/422ae928f49afffc85def4beb4c1a0bb | |
| import subprocess | |
| import time | |
| import signal | |
| import sys | |
| import logging | |
| import re | |
| from collections import deque | |
# Module-wide logger: timestamped INFO-level records for the whole script.
logging.basicConfig(level=logging.INFO, format="%(asctime)s %(levelname)s %(message)s")
log = logging.getLogger("gpu-rgb")
POLL_INTERVAL = 1.0  # seconds between nvidia-smi polls
SMOOTH_WINDOW = 5  # seconds of averaging
IDLE_THRESHOLD = 5  # percent utilization - below this, LEDs off
OPENRGB_BIN = "/usr/local/bin/openrgb"  # path to the OpenRGB CLI binary
OPENRGB_PORT = "6742"  # OpenRGB SDK server port (kept as str: passed straight into argv)
GPU_NAME = "RTX 4090"  # substring matched against --list-devices output
# Color stops: blue (cold) -> cyan -> green -> yellow -> orange -> red (hot)
GRADIENT = [
    (5, 0, 0, 255),
    (25, 0, 180, 255),
    (45, 0, 255, 80),
    (65, 255, 255, 0),
    (85, 255, 120, 0),
    (100, 255, 0, 0),
]

server_proc = None    # Popen handle for the OpenRGB server we spawn
last_color = None     # last hex color pushed, to skip redundant CLI calls
gpu_device_id = None  # OpenRGB device index (string) for the target GPU


def lerp(a, b, t):
    """Linearly interpolate from a to b by fraction t, truncated to int."""
    delta = b - a
    return int(a + delta * t)


def gradient_color(util):
    """Map a utilization percentage onto the GRADIENT color ramp.

    Values at or below the first stop clamp to the coldest color; values
    at or above the last stop clamp to the hottest. In between, each RGB
    channel is interpolated linearly across the enclosing pair of stops.
    Returns an (r, g, b) tuple.
    """
    coldest, hottest = GRADIENT[0], GRADIENT[-1]
    if util <= coldest[0]:
        return coldest[1:]
    if util >= hottest[0]:
        return hottest[1:]
    # Walk consecutive stop pairs until the one bracketing util is found.
    for (u0, r0, g0, b0), (u1, r1, g1, b1) in zip(GRADIENT, GRADIENT[1:]):
        if u0 <= util <= u1:
            t = (util - u0) / (u1 - u0)
            return (lerp(r0, r1, t), lerp(g0, g1, t), lerp(b0, b1, t))
    return (0, 0, 0)
def find_gpu_device():
    """Return the OpenRGB device index (as a string) whose name contains GPU_NAME.

    Queries the local OpenRGB server through the CLI. Exits the process
    with status 1 if the listing fails or no device name matches.
    """
    try:
        listing = subprocess.check_output(
            [OPENRGB_BIN, "--client", f"localhost:{OPENRGB_PORT}", "--list-devices"],
            text=True, timeout=15, stderr=subprocess.DEVNULL
        )
    except Exception as e:
        log.error("Failed to list devices: %s", e)
    else:
        # Device lines look like "<index>: <name>"; take the first name match.
        for line in listing.splitlines():
            m = re.match(r"^(\d+):\s+(.+)", line)
            if m is None:
                continue
            index, name = m.group(1), m.group(2)
            if GPU_NAME in name:
                log.info("Found GPU '%s' at device index %s", name.strip(), index)
                return index
    log.error("Could not find device matching '%s'", GPU_NAME)
    sys.exit(1)
def get_gpu_utilization():
    """Return current GPU utilization in percent via nvidia-smi, or 0 on failure."""
    query = ["nvidia-smi", "--query-gpu=utilization.gpu",
             "--format=csv,noheader,nounits"]
    try:
        # int() stays inside the try so malformed output is also treated
        # as a failed sample rather than crashing the loop.
        raw = subprocess.check_output(query, text=True, timeout=5).strip()
        return int(raw)
    except Exception as e:
        log.warning("nvidia-smi failed: %s", e)
        return 0
def set_color(r, g, b):
    """Push an RGB color to the GPU's LEDs via the OpenRGB CLI.

    Skips the CLI call when the requested color matches the last one that
    was successfully applied. On failure the cache is left untouched so the
    same color is retried on the next poll.
    """
    global last_color
    color_hex = f"{r:02X}{g:02X}{b:02X}"
    if color_hex == last_color:
        return  # no-op: LEDs already show this color
    try:
        subprocess.run(
            [OPENRGB_BIN, "--client", f"localhost:{OPENRGB_PORT}",
             "--device", gpu_device_id, "--mode", "direct", "--color", color_hex],
            stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL,
            timeout=5,
            # Bug fix: without check=True a nonzero openrgb exit was silently
            # treated as success and the color cached as applied, so it was
            # never retried. Raise on failure instead.
            check=True,
        )
        last_color = color_hex
    except Exception as e:
        log.warning("openrgb CLI failed: %s", e)
def start_server():
    """Launch the OpenRGB SDK server in the background and wait for startup."""
    global server_proc
    cmd = [OPENRGB_BIN, "--server", "--server-port", OPENRGB_PORT]
    server_proc = subprocess.Popen(
        cmd, stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL
    )
    log.info("OpenRGB server starting (pid %d), waiting 15s...", server_proc.pid)
    # Fixed startup grace period before any client command is issued.
    time.sleep(15)
    log.info("Server ready")
def cleanup(*_):
    """Signal handler: blank the LEDs, stop the server, and exit cleanly."""
    log.info("Shutting down, turning off LEDs")
    set_color(0, 0, 0)
    proc = server_proc
    if proc:
        proc.terminate()
        try:
            proc.wait(timeout=5)
        except subprocess.TimeoutExpired:
            # Graceful stop timed out; force-kill the server process.
            proc.kill()
    sys.exit(0)
def main():
    """Entry point: start OpenRGB, locate the GPU, and drive the color loop."""
    global gpu_device_id

    # Blank LEDs and stop the server on termination or Ctrl-C.
    for sig in (signal.SIGTERM, signal.SIGINT):
        signal.signal(sig, cleanup)

    start_server()
    gpu_device_id = find_gpu_device()

    # Sliding window of recent utilization samples for smoothing.
    window = deque(maxlen=int(SMOOTH_WINDOW / POLL_INTERVAL))
    log.info("Starting main loop")
    while True:
        window.append(get_gpu_utilization())
        avg = sum(window) / len(window)
        if avg < IDLE_THRESHOLD:
            set_color(0, 0, 0)  # LEDs off while (near-)idle
        else:
            set_color(*gradient_color(avg))
        time.sleep(POLL_INTERVAL)


if __name__ == "__main__":
    main()
Sign up for free
to join this conversation on GitHub.
Already have an account?
Sign in to comment