Created
September 9, 2025 10:06
-
-
Save hagen1778/347ed1c6adbd8ced96656e89e65d8b1e to your computer and use it in GitHub Desktop.
This file contains hidden or bidirectional Unicode text that may be interpreted or compiled differently than what appears below. To review, open the file in an editor that reveals hidden Unicode characters.
Learn more about bidirectional Unicode characters
| package main | |
| import ( | |
| "bytes" | |
| "encoding/json" | |
| "fmt" | |
| "log" | |
| "os" | |
| "time" | |
| ) | |
| // file.json can be retrieved via vmui ExportQuery feature: | |
| // 1. Go to https://play.victoriametrics.com/select/0/prometheus/graph/#/?g0.range_input=30m&g0.end_input=2025-09-09T10%3A05%3A45&g0.relative_time=last_30_minutes&g0.tab=1&g0.tenantID=0&g0.expr=%7Bjob%3D%22vector%22%7D&g0.step_input=30m | |
| // 2. Specify the job you want | |
| // 3. Execute query | |
| // 4. Press Export Query in JSON format in top right corner of working area | |
| // 5. Feed this file to function below | |
| func main() { | |
| parseJsonMetrics("file.json") | |
| } | |
// Record is one item from the payload array.
type Record struct {
	// Metric is the full label set of the series (label name -> value),
	// including the metric name under "__name__".
	Metric map[string]string `json:"metric"`
	// Value is the [timestamp, value] sample pair for this series.
	Value SamplePoint `json:"value"`
	// Group is the numeric group index present in the exported JSON;
	// NOTE(review): its exact semantics are not visible here — presumably
	// the query group from the vmui export; confirm against the exporter.
	Group int `json:"group"`
}
// SamplePoint corresponds to "value": [ <ts>, <val> ]
// Example: [1757411273.665, "1653588058"]
type SamplePoint struct {
	TS  float64 // seconds since epoch (may have fractional part)
	Val string  // raw sample value as string
}

// UnmarshalJSON implements parsing for the [ts, value] array.
// It is tolerant to both numeric and string encodings for the second element.
// Numbers are decoded via json.Number so that large integer values keep
// their exact textual form instead of round-tripping through float64 and
// losing precision above 2^53 (the default json.Unmarshal-into-any behavior).
func (sp *SamplePoint) UnmarshalJSON(b []byte) error {
	dec := json.NewDecoder(bytes.NewReader(b))
	dec.UseNumber() // preserve the exact textual form of numbers
	var raw []any
	if err := dec.Decode(&raw); err != nil {
		return fmt.Errorf("value: not an array: %w", err)
	}
	if len(raw) != 2 {
		return fmt.Errorf("value: expected 2 elements, got %d", len(raw))
	}
	// Parse timestamp (first item) as float64.
	switch t := raw[0].(type) {
	case json.Number:
		f, err := t.Float64()
		if err != nil {
			return fmt.Errorf("value[0]: invalid number: %w", err)
		}
		sp.TS = f
	case float64:
		// Unreachable with UseNumber; kept as a safety net.
		sp.TS = t
	default:
		return fmt.Errorf("value[0]: expected number, got %T", raw[0])
	}
	// Parse sample value (second item) as string (accept number too).
	switch v := raw[1].(type) {
	case string:
		sp.Val = v
	case json.Number:
		sp.Val = v.String() // exact textual form, no precision loss
	case float64:
		// Unreachable with UseNumber; kept as a safety net.
		sp.Val = fmt.Sprintf("%g", v)
	default:
		return fmt.Errorf("value[1]: expected string or number, got %T", raw[1])
	}
	return nil
}

// Time returns the timestamp as time.Time (UTC).
func (sp SamplePoint) Time() time.Time {
	sec := int64(sp.TS)
	nsec := int64((sp.TS - float64(sec)) * 1e9)
	return time.Unix(sec, nsec).UTC()
}
| func parseJsonMetrics(file string) { | |
| b, err := os.ReadFile(file) | |
| if err != nil { | |
| log.Fatal(err) | |
| } | |
| bb := bytes.NewReader(b) | |
| var records []Record | |
| dec := json.NewDecoder(bb) | |
| dec.UseNumber() // be more tolerant with numbers | |
| if err := dec.Decode(&records); err != nil { | |
| fmt.Fprintf(os.Stderr, "decode error: %v\n", err) | |
| os.Exit(1) | |
| } | |
| totalItems := len(records) | |
| totalBytes := 0 | |
| totalLabels := 0 | |
| for _, record := range records { | |
| totalLabels += len(record.Metric) | |
| for k, v := range record.Metric { | |
| totalBytes += len(k) + len(v) | |
| } | |
| } | |
| fmt.Println("------") | |
| fmt.Println("File:", file) | |
| fmt.Println("Total series:", totalItems) | |
| fmt.Println("Avg labels per series:", totalLabels/totalItems) | |
| fmt.Println("Avg labels set size per series:", totalBytes/totalItems) | |
| } |
Sign up for free
to join this conversation on GitHub.
Already have an account?
Sign in to comment