A "Best of the Best Practices" (BOBP) guide to developing in Python.
- "Build tools for others that you want to be built for you." - Kenneth Reitz
- "Simplicity is always better than functionality." - Pieter Hintjens
| """ | |
| The most atomic way to train and run inference for a GPT in pure, dependency-free Python. | |
| This file is the complete algorithm. | |
| Everything else is just efficiency. | |
| @karpathy | |
| """ | |
| import os # os.path.exists | |
| import math # math.log, math.exp |
| #!/usr/bin/env python3 | |
| # mini-openclaw.py - A minimal OpenClaw clone | |
| # Run: uv run --with anthropic --with schedule python mini-openclaw.py | |
| import anthropic | |
| import subprocess | |
| import json | |
| import os | |
| import re | |
| import threading |
| import mlflow | |
| # There are two ways to create parent/child runs in MLflow. | |
| # (1) The most common way is to use the fluent | |
| # mlflow.start_run API, passing nested=True: | |
# Open an MLflow run via the fluent API; every mlflow.* logging call
# inside the `with` body attaches to this run, and leaving the block
# ends the run automatically.
with mlflow.start_run():
    # Number of search trials for this session; logged as a run
    # parameter so the experiment record is self-describing.
    num_trials = 10
    mlflow.log_param("num_trials", num_trials)
    # Sentinel "worst possible" loss — any real trial result will
    # improve on it. NOTE(review): assumes later code (outside this
    # view) updates best_loss per trial; confirm against the full file.
    best_loss = 1e100
| import matplotlib.pyplot as plt | |
| import keras.backend as K | |
| from keras.callbacks import Callback | |
| class LRFinder(Callback): | |
| ''' | |
| A simple callback for finding the optimal learning rate range for your model + dataset. | |