scturtle
@scturtle
scturtle / msg.py
Created December 5, 2025 04:58
pal
import os
import struct
class PalMessages:
    def __init__(self, pal_path):
        self.pal_path = pal_path
        mkf_path = os.path.join(self.pal_path, 'Sss.mkf')
        self._index_data = self._read_mkf_subfile(mkf_path, 3)
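
The preview cuts off before _read_mkf_subfile is defined. MKF is the archive format PAL uses: the file opens with a table of little-endian uint32 offsets, and subfile i occupies the byte range [offset[i], offset[i+1]). A minimal sketch of such a method under that assumption, not the gist's exact code:

    def _read_mkf_subfile(self, mkf_path, index):
        # assumption: the first uint32 is the offset of subfile 0, which sits
        # right after the offset table, so it also gives the table's length
        with open(mkf_path, 'rb') as f:
            data = f.read()
        n_offsets = struct.unpack_from('<I', data, 0)[0] // 4
        offsets = struct.unpack_from('<%dI' % n_offsets, data, 0)
        start, end = offsets[index], offsets[index + 1]
        return data[start:end]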
@scturtle
scturtle / bdf2c.py
Created December 5, 2025 03:34
unifont
import sys
import re
def parse_bdf_and_convert(input_file, output_file):
    MAX_CHARS = 65536
    font_data = [[0, 0, 0, 0] for _ in range(MAX_CHARS)]
    font_wide = [False] * MAX_CHARS
    current_char = -1
    in_bitmap = False
    bitmap_lines = []
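
The parsing loop itself is cut off. BDF is line oriented: each glyph has an ENCODING line with its code point, then a BITMAP line followed by one hex row per scanline, ending with ENDCHAR. A hedged sketch of that state machine, continuing the variables set up above (the glyphs dict is a stand-in; the real script presumably packs the rows into the four-word font_data entries):

    # hedged sketch of the BDF state machine, not the gist's exact code;
    # glyphs maps code point -> list of per-row integers
    glyphs = {}
    for line in open(input_file):
        line = line.strip()
        if line.startswith('ENCODING'):
            current_char = int(line.split()[1])
        elif line == 'BITMAP':
            in_bitmap = True
            bitmap_lines = []
        elif line == 'ENDCHAR':
            in_bitmap = False
            if 0 <= current_char < MAX_CHARS and bitmap_lines:
                # Unifont rows are 2 hex digits (8 px wide) or 4 (16 px wide)
                font_wide[current_char] = len(bitmap_lines[0]) > 2
                glyphs[current_char] = [int(h, 16) for h in bitmap_lines]
        elif in_bitmap:
            bitmap_lines.append(line)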
@scturtle
scturtle / gemma3.py
Last active August 22, 2025 07:33
gemma3 270m
from pathlib import Path
import torch
import torch.nn as nn
import torch.nn.functional as F
from safetensors import safe_open
from tokenizers import Tokenizer
CFG = {
"vocab_size": 262_144,
"context_length": 32_768,
@scturtle
scturtle / gemma3n.py
Last active August 22, 2025 07:32
gemma3n
from dataclasses import dataclass
from pathlib import Path
import torch
from torch import nn
import torch.nn.functional as F
from safetensors import safe_open
from tokenizers import Tokenizer
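
Only the imports survive the preview. The safetensors side is read with safe_open; a hedged sketch of how checkpoint shards are typically gathered into a state dict (the directory layout and function name are assumptions, not the gist's):

def load_state_dict(ckpt_dir: Path) -> dict:
    # collect every tensor from each *.safetensors shard into a plain dict
    state = {}
    for shard in sorted(ckpt_dir.glob('*.safetensors')):
        with safe_open(str(shard), framework='pt') as f:
            for name in f.keys():
                state[name] = f.get_tensor(name)
    return state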
@scturtle
scturtle / qwen3.py
Last active September 1, 2025 18:58
qwen3
import os
from functools import lru_cache
import torch
from torch import nn
import torch.nn.functional as F
from transformers import Qwen3Config
from transformers import Qwen2TokenizerFast
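
Again only imports are shown. Qwen3 uses grouped-query attention, so each key/value head serves several query heads; the usual trick is to tile the KV heads up to the query head count before the attention product. A hedged sketch (the helper name follows the common repeat_kv convention, not necessarily the gist's):

def repeat_kv(x: torch.Tensor, n_rep: int) -> torch.Tensor:
    # x: (batch, n_kv_heads, seq_len, head_dim) -> (batch, n_kv_heads * n_rep, seq_len, head_dim)
    if n_rep == 1:
        return x
    b, n_kv, s, d = x.shape
    x = x[:, :, None, :, :].expand(b, n_kv, n_rep, s, d)
    return x.reshape(b, n_kv * n_rep, s, d)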
@scturtle
scturtle / mpi.py
Last active January 20, 2025 09:03
nccl in 500 LOCs
#!/usr/bin/env python3
# https://github.com/FateScript/experiments/blob/main/se/mpi/mpi.py
# https://github.com/facebookincubator/gloo/tree/main/gloo
import math
import multiprocessing
import os
import numpy as np
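
The gist builds MPI-style collectives on top of multiprocessing pipes; the NCCL-flavored piece is ring all-reduce, which runs a reduce-scatter pass followed by an all-gather pass around the ring. A hedged numpy sketch of the reduce-scatter half, assuming hypothetical send(dst, arr) / recv(src) primitives rather than whatever the gist actually defines:

def ring_reduce_scatter(rank, world, buf, send, recv):
    # split the buffer into one chunk per rank and rotate partial sums around
    # the ring; after world - 1 steps rank r holds the full sum of chunk (r + 1) % world
    chunks = np.array_split(buf, world)
    for step in range(world - 1):
        send_idx = (rank - step) % world
        recv_idx = (rank - step - 1) % world
        send((rank + 1) % world, chunks[send_idx])
        chunks[recv_idx] = chunks[recv_idx] + recv((rank - 1) % world)
    return chunks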
@scturtle
scturtle / a2c.py
Last active December 24, 2024 03:56
Proximal Policy Optimization
import torch
import torch.nn as nn
import torch.nn.functional as F
import torch.optim as optim
import numpy as np
import gym
# Hyperparameters
num_inputs = 4
num_actions = 2
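
The preview stops at the hyperparameters. The defining piece of PPO is the clipped surrogate objective, which keeps the updated policy close to the one that collected the rollouts. A minimal sketch (the 0.2 clip value is the paper's default; the function and tensor names are illustrative, not the gist's):

def ppo_policy_loss(new_log_probs, old_log_probs, advantages, clip_eps=0.2):
    # probability ratio between the new policy and the data-collecting policy
    ratio = torch.exp(new_log_probs - old_log_probs)
    unclipped = ratio * advantages
    clipped = torch.clamp(ratio, 1.0 - clip_eps, 1.0 + clip_eps) * advantages
    # pessimistic (clipped) bound, negated because optimizers minimize
    return -torch.min(unclipped, clipped).mean()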
← → ↔ ⇐ ⇒ ⇔
⟵ ⟶ ⟷ ⟸ ⟹ ⟺
∀ ∃
∂ ∫
∈ ∊ ∉
∏ ∑
∙ ⋅
@scturtle
scturtle / llama3.py
Created May 20, 2024 09:43
llama3 in numpy
import numpy as np
class ModelArgs:
    dim = 288
    n_layers = 6
    n_heads = 6
    norm_eps = 1e-6

def build_cos_sin_cache(head_dim, seq_len, base=10000):
    theta = 1. / (base ** (np.arange(0, head_dim, 2, dtype=np.float32) / head_dim))
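
The preview ends inside build_cos_sin_cache. The usual continuation outer-products positions with the inverse frequencies and caches cos/sin, which a rotary helper then applies to query/key pairs; a hedged sketch of that standard pattern, restated from the truncated function rather than copied from the gist:

def build_cos_sin_cache(head_dim, seq_len, base=10000):
    theta = 1. / (base ** (np.arange(0, head_dim, 2, dtype=np.float32) / head_dim))
    pos = np.arange(seq_len, dtype=np.float32)
    freqs = np.outer(pos, theta)              # (seq_len, head_dim // 2)
    return np.cos(freqs), np.sin(freqs)

def apply_rope(x, cos, sin):
    # x: (..., seq_len, head_dim); rotate adjacent feature pairs
    x1, x2 = x[..., 0::2], x[..., 1::2]
    out = np.empty_like(x)
    out[..., 0::2] = x1 * cos - x2 * sin
    out[..., 1::2] = x1 * sin + x2 * cos
    return out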
@scturtle
scturtle / flash_attention.py
Last active April 11, 2024 16:38
flash attention v1 v2 in numpy
import numpy as np
N_inp = 64
N_out = 64
d = 128
Q = np.random.randn(N_out, d)
K = np.random.randn(N_inp, d)
V = np.random.randn(N_inp, d)
O = np.random.randn(N_out, d)
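
The preview only sets up a single-head problem of size 64x128. The point of flash attention is to process K and V in tiles while keeping a running row max and normalizer, so the full score matrix never materializes; a hedged numpy sketch of that online-softmax tiling in the v2 style that defers the final division (block sizes and names are illustrative, not the gist's):

B_r, B_c = 16, 16                        # row / column tile sizes
acc = np.zeros((N_out, d))               # unnormalized output accumulator
row_max = np.full((N_out, 1), -np.inf)   # running max per query row
row_sum = np.zeros((N_out, 1))           # running softmax normalizer
scale = 1.0 / np.sqrt(d)
for j in range(0, N_inp, B_c):           # loop over K/V tiles
    Kj, Vj = K[j:j + B_c], V[j:j + B_c]
    for i in range(0, N_out, B_r):       # loop over query tiles
        S = scale * Q[i:i + B_r] @ Kj.T                       # (B_r, B_c) scores
        new_max = np.maximum(row_max[i:i + B_r], S.max(-1, keepdims=True))
        P = np.exp(S - new_max)                               # shifted exponentials
        corr = np.exp(row_max[i:i + B_r] - new_max)           # rescale old stats
        row_sum[i:i + B_r] = corr * row_sum[i:i + B_r] + P.sum(-1, keepdims=True)
        acc[i:i + B_r] = corr * acc[i:i + B_r] + P @ Vj
        row_max[i:i + B_r] = new_max
O = acc / row_sum                        # normalized attention output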