Last active
March 2, 2026 20:33
-
-
Save BrianLeishman/00061a44a992619dff02087aa937e207 to your computer and use it in GitHub Desktop.
AiM XRK to CSV converter - parses binary kart data logger files into compact 10Hz CSVs
This file contains hidden or bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
| package main | |
import (
	"bytes"
	"encoding/binary"
	"encoding/csv"
	"encoding/json"
	"flag"
	"fmt"
	"math"
	"os"
	"path/filepath"
	"sort"
	"strconv"
)
// --- Binary parsing types ---

// ChannelDef describes one logged channel, as decoded from a "CHS" header
// record (see parseXRK for the payload offsets each field is read from).
type ChannelDef struct {
	Index       uint16 // channel index; keys samples and group membership
	ShortName   string // short label (used as the CSV column name)
	LongName    string // human-readable channel name
	Size        int    // bytes per raw sample (0 is treated as 4 downstream)
	DecoderType byte   // selects the raw-to-float decoder in decodeValue
	RateByte    byte   // rate field; used as RateByte/Size for per-sample spacing in multi-sample records -- TODO confirm semantics
	Units       string // display units resolved via unitMap (may be empty)
}
// Lap is one lap marker from a "LAP" header record. All times are in
// milliseconds of session timecode.
type Lap struct {
	Number     uint16 // lap number as recorded by the logger
	DurationMs uint32 // lap duration, ms
	EndTimeMs  uint32 // session timecode at which the lap ended, ms
}
// GPSRecord is one fixed-size GPS sample: timecode plus Earth-Centered
// Earth-Fixed position and velocity. Downstream code divides these by 100,
// so values appear to be centimeters and cm/s -- TODO confirm units.
type GPSRecord struct {
	TC     int32 // timecode, ms
	EcefX  int32 // ECEF X position
	EcefY  int32 // ECEF Y position
	EcefZ  int32 // ECEF Z position
	EcefVX int32 // ECEF X velocity
	EcefVY int32 // ECEF Y velocity
	EcefVZ int32 // ECEF Z velocity
}
// Sample is one undecoded channel sample: the raw little-endian bytes and
// the timecode they were captured at.
type Sample struct {
	TC  int32  // timecode, ms
	Raw []byte // raw sample bytes (ChannelDef.Size long, or 4 when Size is 0)
}
// ParseResult aggregates everything extracted from a single XRK file.
type ParseResult struct {
	Channels       map[uint16]*ChannelDef // channel definitions keyed by index
	Groups         map[int][]uint16       // channel-index groups, keyed by order of appearance
	Laps           []Lap                  // lap markers
	GPS            []GPSRecord            // GPS samples, sorted by TC after parsing
	ChannelSamples map[uint16][]Sample    // raw samples per channel index
	Metadata       map[string]string      // session metadata: track, racer, vehicle, date, time, session_type
}
// --- Unit map ---

// unitMap translates the unit byte from a channel definition ("CHS" payload
// byte 12, masked to 7 bits) into a display-unit suffix. Codes not listed
// leave ChannelDef.Units empty.
// NOTE(review): mapping presumably reverse-engineered from observed files --
// confirm against AiM documentation.
var unitMap = map[byte]string{
	1: "%", 3: "G", 4: "deg", 5: "deg/s",
	6: "", 9: "Hz", 11: "", 12: "mm",
	14: "bar", 15: "rpm", 16: "km/h", 17: "C",
	18: "ms", 19: "Nm", 20: "km/h", 21: "V",
	22: "l", 24: "l/s", 26: "time", 27: "A",
	30: "lambda", 31: "gear", 33: "%", 43: "kg",
}
// nullterm interprets b as a NUL-terminated byte string and returns the
// text before the first zero byte, or all of b when no terminator is
// present. nil input yields "".
func nullterm(b []byte) string {
	// bytes.IndexByte replaces the hand-rolled scan; same semantics.
	if i := bytes.IndexByte(b, 0); i >= 0 {
		return string(b[:i])
	}
	return string(b)
}
// le16 reads a little-endian uint16 from the start of b.
func le16(b []byte) uint16 {
	return binary.LittleEndian.Uint16(b)
}

// le32 reads a little-endian uint32 from the start of b.
func le32(b []byte) uint32 {
	return binary.LittleEndian.Uint32(b)
}

// lei32 reads a little-endian int32 from the start of b.
func lei32(b []byte) int32 {
	return int32(binary.LittleEndian.Uint32(b))
}
// tokenStr converts a packed little-endian record token (e.g.
// 'C'|'H'<<8|'S'<<16) into its string form, stopping at the first zero
// byte. Each byte is widened to a rune so the UTF-8 encoding matches the
// original string(rune(...)) concatenation exactly.
func tokenStr(v uint32) string {
	var runes []rune
	for ; v > 0; v >>= 8 {
		runes = append(runes, rune(v&0xFF))
	}
	return string(runes)
}
// parseXRK parses a raw AiM XRK log file into channel definitions, laps,
// GPS records and per-channel raw samples. The file is scanned linearly:
// "<h" introduces a tagged header record, while "(G", "(S" and "(M"
// introduce group / single / multi sample records. Unrecognized bytes are
// skipped one at a time, which makes the scanner resilient to record types
// it does not understand.
func parseXRK(path string) (*ParseResult, error) {
	data, err := os.ReadFile(path)
	if err != nil {
		return nil, err
	}
	r := &ParseResult{
		Channels:       make(map[uint16]*ChannelDef),
		Groups:         make(map[int][]uint16),
		ChannelSamples: make(map[uint16][]Sample),
		Metadata:       make(map[string]string),
	}
	size := len(data)
	pos := 0
	for pos < size-1 {
		b0, b1 := data[pos], data[pos+1]
		if b0 == '<' && b1 == 'h' {
			// Header record: "<h" + 4-byte token + 4-byte length + 2 bytes,
			// then hlen bytes of payload starting at pos+12.
			if pos+11 >= size {
				pos++
				continue
			}
			token := le32(data[pos+2:])
			hlen := int(lei32(data[pos+6:]))
			ps := pos + 12
			pe := ps + hlen
			// Reject truncated or negative-length records.
			if pe > size || hlen < 0 {
				pos++
				continue
			}
			payload := data[ps:pe]
			tok := tokenStr(token)
			switch tok {
			case "CNF":
				// Container record: its payload holds nested records, so
				// descend into it rather than skipping over it.
				pos = ps
				continue
			case "CHS":
				// Channel definition; field offsets within the payload are
				// fixed positions in the 112+ byte record.
				if len(payload) >= 112 {
					idx := le16(payload)
					ch := &ChannelDef{
						Index:       idx,
						ShortName:   nullterm(payload[24:32]),
						LongName:    nullterm(payload[32:56]),
						Size:        int(payload[72]),
						DecoderType: payload[20],
						RateByte:    payload[64] & 0x7F,
					}
					ui := payload[12] & 0x7F
					if u, ok := unitMap[ui]; ok {
						ch.Units = u
					}
					r.Channels[idx] = ch
				}
			case "GRP":
				// Channel group: a flat list of 16-bit channel indices.
				// Groups are keyed by order of appearance in the file.
				if len(payload) >= 2 {
					var chs []uint16
					for i := 0; i+1 < len(payload); i += 2 {
						chs = append(chs, le16(payload[i:]))
					}
					r.Groups[len(r.Groups)] = chs
				}
			case "LAP":
				// Lap marker. payload[1] == 0 appears to filter out
				// sub-records -- TODO confirm against format docs.
				if len(payload) >= 20 && payload[1] == 0 {
					r.Laps = append(r.Laps, Lap{
						Number:     le16(payload[2:]),
						DurationMs: le32(payload[4:]),
						EndTimeMs:  le32(payload[16:]),
					})
				}
			case "GPS", "GPS1":
				// Fixed 56-byte GPS records: timecode plus ECEF position
				// and velocity at fixed offsets.
				for i := 0; i+55 < len(payload); i += 56 {
					r.GPS = append(r.GPS, GPSRecord{
						TC:     lei32(payload[i:]),
						EcefX:  lei32(payload[i+16:]),
						EcefY:  lei32(payload[i+20:]),
						EcefZ:  lei32(payload[i+24:]),
						EcefVX: lei32(payload[i+32:]),
						EcefVY: lei32(payload[i+36:]),
						EcefVZ: lei32(payload[i+40:]),
					})
				}
			case "TRK":
				// Track name: first 32 bytes of the payload, NUL-terminated.
				if len(payload) >= 32 {
					r.Metadata["track"] = nullterm(payload[:32])
				}
			case "RCR":
				r.Metadata["racer"] = nullterm(payload)
			case "VEH":
				r.Metadata["vehicle"] = nullterm(payload)
			case "TMD":
				r.Metadata["date"] = nullterm(payload)
			case "TMT":
				r.Metadata["time"] = nullterm(payload)
			case "VTY":
				r.Metadata["session_type"] = nullterm(payload)
			}
			// Skip 8 trailing bytes after the payload when available --
			// presumably a record footer/checksum; TODO confirm.
			if pe+8 <= size {
				pos = pe + 8
			} else {
				pos = pe
			}
			continue
		}
		if b0 == '(' && b1 == 'G' {
			// Group sample record: timecode + group index, followed by one
			// sample per channel in the group, packed back to back.
			if pos+9 >= size {
				pos++
				continue
			}
			tc := lei32(data[pos+2:])
			gi := int(le16(data[pos+6:]))
			if chs, ok := r.Groups[gi]; ok {
				off := 8
				for _, ci := range chs {
					if ch, ok := r.Channels[ci]; ok {
						sz := ch.Size
						// A zero size is treated as 4 bytes.
						if sz == 0 {
							sz = 4
						}
						if pos+off+sz < size {
							raw := make([]byte, sz)
							copy(raw, data[pos+off:pos+off+sz])
							r.ChannelSamples[ci] = append(r.ChannelSamples[ci], Sample{TC: tc, Raw: raw})
						}
						off += sz
					}
				}
				pos += off
				// Resynchronize on the record's closing ')'.
				for pos < size && data[pos] != ')' {
					pos++
				}
				pos++
			} else {
				pos++
			}
			continue
		}
		if b0 == '(' && b1 == 'S' {
			// Single sample record: timecode + channel index + one sample.
			if pos+9 >= size {
				pos++
				continue
			}
			tc := lei32(data[pos+2:])
			ci := le16(data[pos+6:])
			if ch, ok := r.Channels[ci]; ok {
				sz := ch.Size
				if sz == 0 {
					sz = 4
				}
				if pos+8+sz < size {
					raw := make([]byte, sz)
					copy(raw, data[pos+8:pos+8+sz])
					r.ChannelSamples[ci] = append(r.ChannelSamples[ci], Sample{TC: tc, Raw: raw})
				}
				// +1 skips the closing ')'.
				pos += 8 + sz + 1
			} else {
				pos++
			}
			continue
		}
		if b0 == '(' && b1 == 'M' {
			// Multi sample record: timecode + channel index + count, then
			// `count` consecutive samples for the same channel.
			if pos+11 >= size {
				pos++
				continue
			}
			tc := lei32(data[pos+2:])
			ci := le16(data[pos+6:])
			count := int(le16(data[pos+8:]))
			if ch, ok := r.Channels[ci]; ok {
				sz := ch.Size
				if sz == 0 {
					sz = 4
				}
				// Per-sample time step derived from the channel's rate
				// byte; defaults to 10 ms -- TODO confirm this formula.
				rb := int(ch.RateByte)
				dt := 10
				if rb > 0 && sz > 0 {
					dt = rb / sz
				}
				for i := 0; i < count; i++ {
					so := 10 + i*sz
					if pos+so+sz < size {
						raw := make([]byte, sz)
						copy(raw, data[pos+so:pos+so+sz])
						r.ChannelSamples[ci] = append(r.ChannelSamples[ci], Sample{TC: tc + int32(i*dt), Raw: raw})
					}
				}
				// +1 skips the closing ')'.
				pos += 10 + count*sz + 1
			} else {
				pos++
			}
			continue
		}
		pos++
	}
	// GPS records may arrive out of order across header records.
	sort.Slice(r.GPS, func(i, j int) bool { return r.GPS[i].TC < r.GPS[j].TC })
	return r, nil
}
| // --- Value decoding --- | |
| func decodeValue(raw []byte, ch *ChannelDef) (float64, bool) { | |
| sz := ch.Size | |
| if len(raw) < sz { | |
| return 0, false | |
| } | |
| switch ch.DecoderType { | |
| case 0, 3, 12, 24: | |
| if sz >= 4 { | |
| return float64(lei32(raw)), true | |
| } | |
| case 1, 20: | |
| if sz >= 2 { | |
| return float64(float16ToFloat32(le16(raw))), true | |
| } | |
| case 4, 11: | |
| if sz >= 2 { | |
| return float64(int16(le16(raw))), true | |
| } | |
| case 6: | |
| if sz >= 4 { | |
| return float64(math.Float32frombits(le32(raw))), true | |
| } | |
| case 13: | |
| return float64(raw[0]), true | |
| case 15: | |
| if sz >= 2 { | |
| return float64(le16(raw)), true | |
| } | |
| default: | |
| if sz == 2 { | |
| return float64(int16(le16(raw))), true | |
| } | |
| if sz == 4 { | |
| return float64(math.Float32frombits(le32(raw))), true | |
| } | |
| } | |
| return 0, false | |
| } | |
| func float16ToFloat32(h uint16) float32 { | |
| sign := uint32(h>>15) & 1 | |
| exp := uint32(h>>10) & 0x1F | |
| mant := uint32(h) & 0x3FF | |
| if exp == 0 { | |
| if mant == 0 { | |
| return math.Float32frombits(sign << 31) | |
| } | |
| for mant&0x400 == 0 { | |
| mant <<= 1 | |
| exp-- | |
| } | |
| exp++ | |
| mant &= 0x3FF | |
| exp += 127 - 15 | |
| return math.Float32frombits((sign << 31) | (exp << 23) | (mant << 13)) | |
| } | |
| if exp == 0x1F { | |
| if mant == 0 { | |
| return math.Float32frombits((sign << 31) | 0x7F800000) | |
| } | |
| return math.Float32frombits((sign << 31) | 0x7FC00000) | |
| } | |
| exp += 127 - 15 | |
| return math.Float32frombits((sign << 31) | (exp << 23) | (mant << 13)) | |
| } | |
// --- Coordinate helpers ---

// degreesToRad converts an angle from degrees to radians.
func degreesToRad(deg float64) float64 {
	return deg * math.Pi / 180
}

// radToDegrees converts an angle from radians to degrees.
func radToDegrees(rad float64) float64 {
	return rad * 180 / math.Pi
}
// ecefToLatLonAlt converts an Earth-Centered Earth-Fixed position given in
// centimeters into WGS84 geodetic latitude/longitude (degrees) and altitude
// (meters), using Bowring's closed-form approximation.
func ecefToLatLonAlt(xCm, yCm, zCm int32) (lat, lon, alt float64) {
	// Centimeters -> meters.
	x := float64(xCm) / 100.0
	y := float64(yCm) / 100.0
	z := float64(zCm) / 100.0
	// WGS84 ellipsoid constants.
	a := 6378137.0         // semi-major axis, m
	b := 6356752.314245    // semi-minor axis, m
	e2 := 1 - (b*b)/(a*a)  // first eccentricity squared
	ep2 := (a*a)/(b*b) - 1 // second eccentricity squared
	p := math.Sqrt(x*x + y*y)
	lon = math.Atan2(y, x)
	// Bowring's parametric latitude, then one closed-form correction step.
	theta := math.Atan2(z*a, p*b)
	lat = math.Atan2(z+ep2*b*math.Pow(math.Sin(theta), 3), p-e2*a*math.Pow(math.Cos(theta), 3))
	sinLat := math.Sin(lat)
	// Prime-vertical radius of curvature at this latitude.
	N := a / math.Sqrt(1-e2*sinLat*sinLat)
	// Choose the better-conditioned altitude formula by latitude band.
	if math.Abs(lat) < math.Pi/4 {
		alt = p/math.Cos(lat) - N
	} else {
		alt = z/math.Sin(lat) - N*(1-e2)
	}
	// Radians -> degrees (conversion inlined as rad*180/Pi, matching
	// radToDegrees exactly, so this block is self-contained).
	return lat * 180 / math.Pi, lon * 180 / math.Pi, alt
}
// haversineFt returns the great-circle distance in feet between two
// latitude/longitude points (degrees), using the haversine formula on a
// spherical Earth of radius 6371 km.
func haversineFt(lat1, lon1, lat2, lon2 float64) float64 {
	// Degree->radian conversions are inlined as x*Pi/180 (identical to
	// degreesToRad) so this block is self-contained.
	dLat := (lat2 - lat1) * math.Pi / 180
	dLon := (lon2 - lon1) * math.Pi / 180
	sinHalfLat := math.Sin(dLat / 2)
	sinHalfLon := math.Sin(dLon / 2)
	h := sinHalfLat*sinHalfLat +
		math.Cos(lat1*math.Pi/180)*math.Cos(lat2*math.Pi/180)*sinHalfLon*sinHalfLon
	angle := 2 * math.Atan2(math.Sqrt(h), math.Sqrt(1-h))
	// Arc length in meters (R = 6371000 m), converted to feet.
	return 6371000 * angle * 3.28084
}
// --- Decoded time series for a channel ---

// TVPair is one decoded sample: timecode (ms) and decoded value.
type TVPair struct {
	TC  int32
	Val float64
}
| func decodeChannel(samples []Sample, ch *ChannelDef) []TVPair { | |
| var out []TVPair | |
| for _, s := range samples { | |
| if v, ok := decodeValue(s.Raw, ch); ok { | |
| out = append(out, TVPair{TC: s.TC, Val: v}) | |
| } | |
| } | |
| sort.Slice(out, func(i, j int) bool { return out[i].TC < out[j].TC }) | |
| return out | |
| } | |
// --- GPS output row ---

// GPSRow is one processed GPS point, ready for CSV output.
type GPSRow struct {
	TC       int32   // timecode, ms
	Lat      float64 // latitude, degrees
	Lon      float64 // longitude, degrees
	Alt      float64 // altitude, meters
	SpeedMPH float64 // ground speed, mph
	CumDist  float64 // cumulative distance along the session, feet
}
| func buildGPSRows(records []GPSRecord) []GPSRow { | |
| var rows []GPSRow | |
| for _, rec := range records { | |
| if rec.EcefX == 0 && rec.EcefY == 0 { | |
| continue | |
| } | |
| lat, lon, alt := ecefToLatLonAlt(rec.EcefX, rec.EcefY, rec.EcefZ) | |
| vx := float64(rec.EcefVX) / 100.0 | |
| vy := float64(rec.EcefVY) / 100.0 | |
| vz := float64(rec.EcefVZ) / 100.0 | |
| speed := math.Sqrt(vx*vx+vy*vy+vz*vz) * 3.6 * 0.621371 | |
| rows = append(rows, GPSRow{TC: rec.TC, Lat: lat, Lon: lon, Alt: alt, SpeedMPH: speed}) | |
| } | |
| sort.Slice(rows, func(i, j int) bool { return rows[i].TC < rows[j].TC }) | |
| for i := 1; i < len(rows); i++ { | |
| d := haversineFt(rows[i-1].Lat, rows[i-1].Lon, rows[i].Lat, rows[i].Lon) | |
| dt := float64(rows[i].TC-rows[i-1].TC) / 1000.0 | |
| if dt > 0 && d/dt < 300 { | |
| rows[i].CumDist = rows[i-1].CumDist + d | |
| } else { | |
| rows[i].CumDist = rows[i-1].CumDist | |
| } | |
| } | |
| return rows | |
| } | |
// --- Output types ---

// LapSummary is the per-lap entry written into summary.json.
type LapSummary struct {
	Lap        int     `json:"lap"`         // lap number
	Time       string  `json:"time"`        // formatted lap time, "M:SS.mmm"
	Ms         int     `json:"ms"`          // lap duration, ms
	MaxMPH     float64 `json:"max_mph"`     // highest GPS speed in the lap
	MinMPH     float64 `json:"min_mph"`     // lowest GPS speed in the lap
	MaxLatG    float64 `json:"max_lat_g"`   // peak |lateral G| from the "LatA" channel
	AvgLatG    float64 `json:"avg_lat_g"`   // mean |lateral G| from the "LatA" channel
	DistFt     float64 `json:"dist_ft"`     // lap distance, feet
	GPSPoints  int     `json:"gps_points"`  // GPS rows in the lap window
	IMUPoints  int     `json:"imu_points"`  // unique sensor timecodes (full mode only)
	GPSFile    string  `json:"gps_file"`    // per-lap GPS (or merged compact) CSV filename
	SensorFile string  `json:"sensor_file"` // per-lap sensor CSV filename; empty in compact mode
}
// SessionSummary is the per-input-file entry written into summary.json.
type SessionSummary struct {
	Session  int           `json:"session"`  // 1-based session number (input order)
	FileID   string        `json:"file_id"`  // short ID derived from the input filename
	Date     string        `json:"date"`     // from "TMD" metadata
	Time     string        `json:"time"`     // from "TMT" metadata
	Racer    string        `json:"racer"`    // from "RCR" metadata
	Vehicle  string        `json:"vehicle"`  // from "VEH" metadata
	Track    string        `json:"track"`    // from "TRK" metadata
	Channels []ChannelInfo `json:"channels"` // exported sensor channels, in column order
	Laps     []LapSummary  `json:"laps"`     // per-lap summaries
}
// ChannelInfo is the channel descriptor embedded in SessionSummary.
type ChannelInfo struct {
	ShortName string `json:"short_name"` // short label (matches CSV column prefix)
	LongName  string `json:"long_name"`  // human-readable channel name
	Units     string `json:"units"`      // display units; may be empty
}
// fmtFloat renders v in fixed-point notation with the minimal number of
// digits needed to round-trip back to the same float64.
func fmtFloat(v float64) string {
	const shortestPrecision = -1 // strconv: smallest digit count that round-trips
	return strconv.FormatFloat(v, 'f', shortestPrecision, 64)
}
// main converts one or more AiM XRK files into per-lap CSVs plus a
// summary.json index in <output-dir>. In -compact mode each lap gets a
// single merged ~10Hz CSV (GPS plus selected IMU channels); otherwise each
// lap gets a full-rate GPS CSV and a wide sensor CSV with one column per
// channel.
func main() {
	compact := flag.Bool("compact", false, "Output compact 10Hz CSVs (for AI analysis)")
	flag.Parse()
	args := flag.Args()
	if len(args) < 2 {
		fmt.Fprintf(os.Stderr, "Usage: %s [-compact] <output-dir> <file1.xrk> [file2.xrk ...]\n", os.Args[0])
		os.Exit(1)
	}
	outDir := args[0]
	files := args[1:]
	if err := os.MkdirAll(outDir, 0o755); err != nil {
		fmt.Fprintf(os.Stderr, "Error creating output dir: %v\n", err)
		os.Exit(1)
	}
	var allSummaries []SessionSummary
	for sessNum, path := range files {
		sessIdx := sessNum + 1
		// Derive a short file ID: for long names, the last 4 characters of
		// the base name with its extension stripped.
		fileID := filepath.Base(path)
		if len(fileID) > 8 {
			ext := filepath.Ext(fileID)
			base := fileID[:len(fileID)-len(ext)]
			if len(base) >= 4 {
				fileID = base[len(base)-4:]
			}
		}
		fmt.Fprintf(os.Stderr, "Parsing session %d: %s\n", sessIdx, filepath.Base(path))
		result, err := parseXRK(path)
		if err != nil {
			fmt.Fprintf(os.Stderr, " Error: %v\n", err)
			continue
		}
		// Build GPS rows
		gpsRows := buildGPSRows(result.GPS)
		if len(gpsRows) == 0 {
			fmt.Fprintf(os.Stderr, " No GPS data found\n")
			continue
		}
		// Identify sensor channels (skip computed/timing channels that have no real samples)
		// Channels we want: anything with actual sample data and a meaningful decoder
		type sensorChannel struct {
			idx  uint16
			ch   *ChannelDef
			data []TVPair
		}
		var sensorChannels []sensorChannel
		// Sort channel indices for consistent column order
		var chIndices []uint16
		for idx := range result.Channels {
			chIndices = append(chIndices, idx)
		}
		sort.Slice(chIndices, func(i, j int) bool { return chIndices[i] < chIndices[j] })
		for _, idx := range chIndices {
			ch := result.Channels[idx]
			samples := result.ChannelSamples[idx]
			if len(samples) == 0 {
				continue
			}
			// Skip channels with very large sample sizes (compound types like Lap Time = 20 bytes)
			if ch.Size > 8 {
				continue
			}
			decoded := decodeChannel(samples, ch)
			if len(decoded) == 0 {
				continue
			}
			sensorChannels = append(sensorChannels, sensorChannel{idx: idx, ch: ch, data: decoded})
		}
		// Build session summary
		sess := SessionSummary{
			Session: sessIdx,
			FileID:  fileID,
			Date:    result.Metadata["date"],
			Time:    result.Metadata["time"],
			Racer:   result.Metadata["racer"],
			Vehicle: result.Metadata["vehicle"],
			Track:   result.Metadata["track"],
		}
		for _, sc := range sensorChannels {
			sess.Channels = append(sess.Channels, ChannelInfo{
				ShortName: sc.ch.ShortName,
				LongName:  sc.ch.LongName,
				Units:     sc.ch.Units,
			})
		}
		// Process each lap
		for _, lap := range result.Laps {
			// NOTE(review): if DurationMs ever exceeded EndTimeMs the
			// unsigned subtraction would wrap before the int32 conversion
			// -- confirm loggers never emit such laps.
			startTC := int32(lap.EndTimeMs - lap.DurationMs)
			endTC := int32(lap.EndTimeMs)
			// --- GPS CSV ---
			var lapGPS []GPSRow
			for _, gp := range gpsRows {
				if gp.TC >= startTC && gp.TC <= endTC {
					lapGPS = append(lapGPS, gp)
				}
			}
			if len(lapGPS) == 0 {
				continue
			}
			// Timecodes and distances in the output are rebased to the lap start.
			baseDist := lapGPS[0].CumDist
			baseTC := startTC
			gpsName := fmt.Sprintf("s%d_lap%02d_gps.csv", sessIdx, lap.Number)
			gpsPath := filepath.Join(outDir, gpsName)
			if *compact {
				// Compact mode: single merged file at 10Hz
				gpsName = fmt.Sprintf("s%d_lap%02d.csv", sessIdx, lap.Number)
				gpsPath = filepath.Join(outDir, gpsName)
			}
			gf, err := os.Create(gpsPath)
			if err != nil {
				fmt.Fprintf(os.Stderr, " Error creating %s: %v\n", gpsName, err)
				continue
			}
			// NOTE(review): csv.Writer Write/Flush errors are never checked
			// here; a write failure (e.g. disk full) would go unnoticed.
			gw := csv.NewWriter(gf)
			if *compact {
				// Compact mode: merged file, 10Hz, rounded
				gw.Write([]string{"tc_ms", "dist_ft", "lat", "lon", "speed_mph", "lat_g", "lon_g", "yaw_dps"})
				// Build lookup series for compact mode
				var latGData, inlGData, yawData []TVPair
				for _, sc := range sensorChannels {
					switch sc.ch.ShortName {
					case "LatA":
						latGData = sc.data
					case "InlA":
						inlGData = sc.data
					case "YawR":
						yawData = sc.data
					}
				}
				// Downsample to roughly 10Hz by requiring >= 90 ms between
				// emitted rows; lastTC starts far in the past so the first
				// row always passes.
				var lastTC int32 = -999
				for _, gp := range lapGPS {
					if gp.TC-lastTC < 90 {
						continue
					}
					lastTC = gp.TC
					row := []string{
						strconv.Itoa(int(gp.TC - baseTC)),
						fmtFloat(math.Round((gp.CumDist - baseDist) * 10) / 10),
						strconv.FormatFloat(gp.Lat, 'f', 7, 64),
						strconv.FormatFloat(gp.Lon, 'f', 7, 64),
						fmtFloat(math.Round(gp.SpeedMPH*10) / 10),
						nearestStr(latGData, gp.TC),
						nearestStr(inlGData, gp.TC),
						nearestStr(yawData, gp.TC),
					}
					gw.Write(row)
				}
			} else {
				// Full mode: every GPS point, full precision
				gw.Write([]string{"tc_ms", "lat", "lon", "alt_m", "speed_mph", "dist_ft"})
				for _, gp := range lapGPS {
					gw.Write([]string{
						strconv.Itoa(int(gp.TC - baseTC)),
						strconv.FormatFloat(gp.Lat, 'f', 10, 64),
						strconv.FormatFloat(gp.Lon, 'f', 10, 64),
						strconv.FormatFloat(gp.Alt, 'f', 3, 64),
						fmtFloat(gp.SpeedMPH),
						fmtFloat(gp.CumDist - baseDist),
					})
				}
			}
			gw.Flush()
			gf.Close()
			// --- Sensor CSV (full mode only) ---
			sensorName := fmt.Sprintf("s%d_lap%02d_sensors.csv", sessIdx, lap.Number)
			imuPoints := 0
			if !*compact {
				// Collect all unique timecodes across sensor channels for this lap
				tcSet := make(map[int32]bool)
				lapSensorData := make([][]TVPair, len(sensorChannels))
				for i, sc := range sensorChannels {
					var lapData []TVPair
					for _, tv := range sc.data {
						if tv.TC >= startTC && tv.TC <= endTC {
							lapData = append(lapData, tv)
							tcSet[tv.TC] = true
						}
					}
					lapSensorData[i] = lapData
				}
				// Sort timecodes
				var allTCs []int32
				for tc := range tcSet {
					allTCs = append(allTCs, tc)
				}
				sort.Slice(allTCs, func(i, j int) bool { return allTCs[i] < allTCs[j] })
				imuPoints = len(allTCs)
				// Build index maps for fast lookup: tc -> value
				chMaps := make([]map[int32]float64, len(sensorChannels))
				for i, ld := range lapSensorData {
					m := make(map[int32]float64, len(ld))
					for _, tv := range ld {
						m[tv.TC] = tv.Val
					}
					chMaps[i] = m
				}
				// Write sensor CSV
				sensorPath := filepath.Join(outDir, sensorName)
				sf, err := os.Create(sensorPath)
				if err != nil {
					fmt.Fprintf(os.Stderr, " Error creating %s: %v\n", sensorName, err)
				} else {
					sw := csv.NewWriter(sf)
					header := []string{"tc_ms"}
					for _, sc := range sensorChannels {
						colName := sc.ch.ShortName
						if sc.ch.Units != "" {
							colName += "_" + sc.ch.Units
						}
						header = append(header, colName)
					}
					sw.Write(header)
					// One row per unique timecode; channels without a sample
					// at that timecode get an empty cell.
					for _, tc := range allTCs {
						row := []string{strconv.Itoa(int(tc - baseTC))}
						for i := range sensorChannels {
							if v, ok := chMaps[i][tc]; ok {
								row = append(row, fmtFloat(v))
							} else {
								row = append(row, "")
							}
						}
						sw.Write(row)
					}
					sw.Flush()
					sf.Close()
				}
			}
			// Summary stats
			durSec := float64(lap.DurationMs) / 1000.0
			mins := int(durSec) / 60
			secs := durSec - float64(mins*60)
			maxMPH, minMPH := 0.0, 999.0
			for _, gp := range lapGPS {
				if gp.SpeedMPH > maxMPH {
					maxMPH = gp.SpeedMPH
				}
				if gp.SpeedMPH < minMPH {
					minMPH = gp.SpeedMPH
				}
			}
			// Lat G stats from sensor data
			maxLatG, sumLatG := 0.0, 0.0
			latGCount := 0
			for _, sc := range sensorChannels {
				if sc.ch.ShortName != "LatA" {
					continue
				}
				for _, tv := range sc.data {
					if tv.TC >= startTC && tv.TC <= endTC {
						av := math.Abs(tv.Val)
						sumLatG += av
						latGCount++
						if av > maxLatG {
							maxLatG = av
						}
					}
				}
			}
			avgLatG := 0.0
			if latGCount > 0 {
				avgLatG = sumLatG / float64(latGCount)
			}
			distFt := 0.0
			if len(lapGPS) > 0 {
				distFt = lapGPS[len(lapGPS)-1].CumDist - baseDist
			}
			ls := LapSummary{
				Lap:        int(lap.Number),
				Time:       fmt.Sprintf("%d:%06.3f", mins, secs),
				Ms:         int(lap.DurationMs),
				MaxMPH:     maxMPH,
				MinMPH:     minMPH,
				MaxLatG:    maxLatG,
				AvgLatG:    avgLatG,
				DistFt:     distFt,
				GPSPoints:  len(lapGPS),
				IMUPoints:  imuPoints,
				GPSFile:    gpsName,
				SensorFile: sensorName,
			}
			if *compact {
				// No separate sensor file is written in compact mode.
				ls.SensorFile = ""
			}
			sess.Laps = append(sess.Laps, ls)
			fmt.Fprintf(os.Stderr, " Lap %d: %s (GPS: %d pts, Sensors: %d pts, %.0f ft)\n",
				lap.Number, fmt.Sprintf("%d:%06.3f", mins, secs), len(lapGPS), imuPoints, distFt)
		}
		allSummaries = append(allSummaries, sess)
	}
	// Write summary JSON
	summaryPath := filepath.Join(outDir, "summary.json")
	sf, err := os.Create(summaryPath)
	if err != nil {
		fmt.Fprintf(os.Stderr, "Error creating summary: %v\n", err)
		os.Exit(1)
	}
	enc := json.NewEncoder(sf)
	enc.SetIndent("", " ")
	// NOTE(review): Encode's error is ignored; a write failure would
	// silently leave a truncated summary.json.
	enc.Encode(allSummaries)
	sf.Close()
	// Tally the total size of the output directory for the final report.
	// NOTE(review): ReadDir and e.Info() errors are ignored; a failed
	// Info() would return a nil FileInfo and panic on info.Size() --
	// confirm acceptable for this best-effort report.
	var totalSize int64
	entries, _ := os.ReadDir(outDir)
	for _, e := range entries {
		info, _ := e.Info()
		totalSize += info.Size()
	}
	fmt.Fprintf(os.Stderr, "\nOutput: %s (%d files, %.1f MB total)\n", outDir, len(entries), float64(totalSize)/1024/1024)
}
| // nearestStr finds the nearest value in a sorted TVPair slice and returns it as a string. | |
| // Used in compact mode for interpolating sensor data at GPS timestamps. | |
| func nearestStr(data []TVPair, tc int32) string { | |
| if len(data) == 0 { | |
| return "" | |
| } | |
| lo, hi := 0, len(data)-1 | |
| for lo < hi { | |
| mid := (lo + hi) / 2 | |
| if data[mid].TC < tc { | |
| lo = mid + 1 | |
| } else { | |
| hi = mid | |
| } | |
| } | |
| best := lo | |
| if best > 0 { | |
| if abs32(data[best-1].TC-tc) < abs32(data[best].TC-tc) { | |
| best-- | |
| } | |
| } | |
| if abs32(data[best].TC-tc) > 500 { | |
| return "" | |
| } | |
| return fmtFloat(data[best].Val) | |
| } | |
// abs32 returns the absolute value of v. As with any two's-complement
// absolute value, abs32(math.MinInt32) overflows back to math.MinInt32;
// callers here only pass timecode differences, which stay well in range.
func abs32(v int32) int32 {
	if v >= 0 {
		return v
	}
	return -v
}
Sign up for free
to join this conversation on GitHub.
Already have an account?
Sign in to comment