Prometheus: Improve response parser performance (#101517)

* introduce tests

* improve allocations by 61%

* ~27% improvement

goos: darwin
goarch: arm64
pkg: github.com/grafana/grafana/pkg/promlib/converter
cpu: Apple M1 Pro
                                                            │  pmem.0.txt  │             pmem.2.txt             │
                                                            │    sec/op    │   sec/op     vs base               │
ReadPrometheusStyleResult_FromFile/prom-query-range.json-10   117.72µ ± 6%   82.90µ ± 4%  -29.57% (p=0.002 n=6)

                                                            │  pmem.0.txt   │             pmem.2.txt              │
                                                            │     B/op      │     B/op      vs base               │
ReadPrometheusStyleResult_FromFile/prom-query-range.json-10   104.34Ki ± 0%   76.09Ki ± 0%  -27.08% (p=0.002 n=6)

                                                            │ pmem.0.txt  │            pmem.2.txt             │
                                                            │  allocs/op  │ allocs/op   vs base               │
ReadPrometheusStyleResult_FromFile/prom-query-range.json-10   2463.0 ± 0%   899.0 ± 0%  -63.50% (p=0.002 n=6)

* add more tests

* remove comment lines

* read string as slice to prevent type conversion

* golang lint

* revert appendValueFromString improvement

* fix merging issues
This commit is contained in:
ismail simsek
2025-03-12 13:16:19 +01:00
committed by GitHub
parent df0df68f30
commit f64be062c4
7 changed files with 99718 additions and 21 deletions

View File

@ -597,17 +597,17 @@ func readScalar(iter *sdkjsoniter.Iterator, dataPlane bool) backend.DataResponse
func readMatrixOrVectorMulti(iter *sdkjsoniter.Iterator, resultType string, opt Options) backend.DataResponse { func readMatrixOrVectorMulti(iter *sdkjsoniter.Iterator, resultType string, opt Options) backend.DataResponse {
rsp := backend.DataResponse{} rsp := backend.DataResponse{}
size := 0
for more, err := iter.ReadArray(); more; more, err = iter.ReadArray() { for more, err := iter.ReadArray(); more; more, err = iter.ReadArray() {
if err != nil { if err != nil {
return rspErr(err) return rspErr(err)
} }
timeField := data.NewFieldFromFieldType(data.FieldTypeTime, 0)
timeField.Name = data.TimeSeriesTimeFieldName
valueField := data.NewFieldFromFieldType(data.FieldTypeFloat64, 0)
valueField.Name = data.TimeSeriesValueFieldName
valueField.Labels = data.Labels{}
// First read all values to temporary storage
tempTimes := make([]time.Time, 0, size)
tempValues := make([]float64, 0, size)
var labels data.Labels
var histogram *histogramInfo var histogram *histogramInfo
for l1Field, err := iter.ReadObject(); l1Field != ""; l1Field, err = iter.ReadObject() { for l1Field, err := iter.ReadObject(); l1Field != ""; l1Field, err = iter.ReadObject() {
@ -616,7 +616,7 @@ func readMatrixOrVectorMulti(iter *sdkjsoniter.Iterator, resultType string, opt
} }
switch l1Field { switch l1Field {
case "metric": case "metric":
if err = iter.ReadVal(&valueField.Labels); err != nil { if err = iter.ReadVal(&labels); err != nil {
return rspErr(err) return rspErr(err)
} }
@ -625,10 +625,9 @@ func readMatrixOrVectorMulti(iter *sdkjsoniter.Iterator, resultType string, opt
if err != nil { if err != nil {
return rspErr(err) return rspErr(err)
} }
timeField.Append(t) tempTimes = append(tempTimes, t)
valueField.Append(v) tempValues = append(tempValues, v)
// nolint:goconst
case "values": case "values":
for more, err := iter.ReadArray(); more; more, err = iter.ReadArray() { for more, err := iter.ReadArray(); more; more, err = iter.ReadArray() {
if err != nil { if err != nil {
@ -638,8 +637,8 @@ func readMatrixOrVectorMulti(iter *sdkjsoniter.Iterator, resultType string, opt
if err != nil { if err != nil {
return rspErr(err) return rspErr(err)
} }
timeField.Append(t) tempTimes = append(tempTimes, t)
valueField.Append(v) tempValues = append(tempValues, v)
} }
case "histogram": case "histogram":
@ -673,21 +672,18 @@ func readMatrixOrVectorMulti(iter *sdkjsoniter.Iterator, resultType string, opt
} }
if histogram != nil { if histogram != nil {
histogram.yMin.Labels = valueField.Labels histogram.yMin.Labels = labels
histogram.yMax.Labels = valueField.Labels histogram.yMax.Labels = labels
histogram.count.Labels = valueField.Labels histogram.count.Labels = labels
histogram.yLayout.Labels = valueField.Labels histogram.yLayout.Labels = labels
histogram.time.Labels = valueField.Labels histogram.time.Labels = labels
frame := data.NewFrame(valueField.Name, histogram.time, histogram.yMin, histogram.yMax, histogram.count, histogram.yLayout) frame := data.NewFrame("", histogram.time, histogram.yMin, histogram.yMax, histogram.count, histogram.yLayout)
frame.Meta = &data.FrameMeta{ frame.Meta = &data.FrameMeta{
Type: "heatmap-cells", Type: "heatmap-cells",
} }
if frame.Name == data.TimeSeriesValueFieldName {
frame.Name = "" // only set the name if useful
}
rsp.Frames = append(rsp.Frames, frame) rsp.Frames = append(rsp.Frames, frame)
} else { } else {
frame := data.NewFrame("", timeField, valueField) frame := data.NewFrame("", data.NewField(data.TimeSeriesTimeFieldName, nil, tempTimes), data.NewField(data.TimeSeriesValueFieldName, labels, tempValues))
frame.Meta = &data.FrameMeta{ frame.Meta = &data.FrameMeta{
Type: data.FrameTypeTimeSeriesMulti, Type: data.FrameTypeTimeSeriesMulti,
Custom: resultTypeToCustomMeta(resultType), Custom: resultTypeToCustomMeta(resultType),
@ -699,6 +695,7 @@ func readMatrixOrVectorMulti(iter *sdkjsoniter.Iterator, resultType string, opt
frame.Meta.TypeVersion = data.FrameTypeVersion{0, 1} frame.Meta.TypeVersion = data.FrameTypeVersion{0, 1}
} }
rsp.Frames = append(rsp.Frames, frame) rsp.Frames = append(rsp.Frames, frame)
size = len(tempTimes)
} }
} }

View File

@ -0,0 +1,45 @@
package converter
import (
	"os"
	"path/filepath"
	"testing"

	jsoniter "github.com/json-iterator/go"
)
// readTestData reads a JSON file from testdata directory
func readTestData(t *testing.B, filename string) []byte {
// Can ignore gosec G304 here, because this is a constant defined below benchmark test
// nolint:gosec
data, err := os.ReadFile("testdata/" + filename)
if err != nil {
t.Fatal(err)
}
return data
}
// BenchmarkReadPrometheusStyleResult_FromFile measures parsing performance
// against the JSON response fixtures in testdata. Reproduce with:
// go test -benchmem -run=^$ -bench=BenchmarkReadPrometheusStyleResult_FromFile$ github.com/grafana/grafana/pkg/promlib/converter/ -memprofile pmem.out -count 6 | tee pmem.0.txt
func BenchmarkReadPrometheusStyleResult_FromFile(b *testing.B) {
	opts := Options{Dataplane: true}
	for _, name := range []string{
		"prom-query-range.json",
		"prom-query-range-big.json",
		"prom-matrix-histogram-partitioned.json",
	} {
		raw := readTestData(b, name)
		// The iterator is created once per fixture and rewound after
		// each parse so allocations inside the timed loop reflect the
		// parser, not the setup.
		it := jsoniter.ParseBytes(jsoniter.ConfigDefault, raw)
		b.Run(name, func(b *testing.B) {
			b.ReportAllocs()
			b.ResetTimer()
			for i := 0; i < b.N; i++ {
				_ = ReadPrometheusStyleResult(it, opts)
				it.ResetBytes(raw)
			}
		})
	}
}

View File

@ -36,6 +36,8 @@ var files = []string{
"prom-exemplars-a", "prom-exemplars-a",
"prom-exemplars-b", "prom-exemplars-b",
"prom-exemplars-diff-labels", "prom-exemplars-diff-labels",
"prom-query-range",
"prom-query-range-big",
"loki-streams-a", "loki-streams-a",
"loki-streams-b", "loki-streams-b",
"loki-streams-c", "loki-streams-c",

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

File diff suppressed because one or more lines are too long