Expressions: Add model struct for the query types (not map[string]any) (#82745)
@@ -179,6 +179,7 @@ Experimental features might be changed or removed without prior notice.
 | `nodeGraphDotLayout` | Changed the layout algorithm for the node graph |
 | `newPDFRendering` | New implementation for the dashboard to PDF rendering |
 | `kubernetesAggregator` | Enable grafana aggregator |
+| `expressionParser` | Enable new expression parser |
 
 ## Development feature toggles
 
@@ -180,6 +180,7 @@ export interface FeatureToggles {
   groupToNestedTableTransformation?: boolean;
   newPDFRendering?: boolean;
   kubernetesAggregator?: boolean;
+  expressionParser?: boolean;
   groupByVariable?: boolean;
   alertingUpgradeDryrunOnStart?: boolean;
 }
@@ -275,21 +275,12 @@ type ConditionReducerJSON struct {
 	// Params []any `json:"params"` (Unused)
 }
 
-// UnmarshalConditionsCmd creates a new ConditionsCmd.
-func UnmarshalConditionsCmd(rawQuery map[string]any, refID string) (*ConditionsCmd, error) {
-	jsonFromM, err := json.Marshal(rawQuery["conditions"])
-	if err != nil {
-		return nil, fmt.Errorf("failed to remarshal classic condition body: %w", err)
-	}
-	var ccj []ConditionJSON
-	if err = json.Unmarshal(jsonFromM, &ccj); err != nil {
-		return nil, fmt.Errorf("failed to unmarshal remarshaled classic condition body: %w", err)
-	}
-
+func NewConditionCmd(refID string, ccj []ConditionJSON) (*ConditionsCmd, error) {
 	c := &ConditionsCmd{
 		RefID: refID,
 	}
 
+	var err error
 	for i, cj := range ccj {
 		cond := condition{}
 
@@ -316,6 +307,18 @@ func UnmarshalConditionsCmd(rawQuery map[string]any, refID string) (*ConditionsC
 
 		c.Conditions = append(c.Conditions, cond)
 	}
 
 	return c, nil
 }
+
+// UnmarshalConditionsCmd creates a new ConditionsCmd.
+func UnmarshalConditionsCmd(rawQuery map[string]any, refID string) (*ConditionsCmd, error) {
+	jsonFromM, err := json.Marshal(rawQuery["conditions"])
+	if err != nil {
+		return nil, fmt.Errorf("failed to remarshal classic condition body: %w", err)
+	}
+	var ccj []ConditionJSON
+	if err = json.Unmarshal(jsonFromM, &ccj); err != nil {
+		return nil, fmt.Errorf("failed to unmarshal remarshaled classic condition body: %w", err)
+	}
+	return NewConditionCmd(refID, ccj)
+}
@@ -77,14 +77,14 @@ func (gm *MathCommand) Execute(ctx context.Context, _ time.Time, vars mathexp.Va
 
 // ReduceCommand is an expression command for reduction of a timeseries such as a min, mean, or max.
 type ReduceCommand struct {
-	Reducer      string
+	Reducer      mathexp.ReducerID
 	VarToReduce  string
 	refID        string
 	seriesMapper mathexp.ReduceMapper
 }
 
 // NewReduceCommand creates a new ReduceCMD.
-func NewReduceCommand(refID, reducer, varToReduce string, mapper mathexp.ReduceMapper) (*ReduceCommand, error) {
+func NewReduceCommand(refID string, reducer mathexp.ReducerID, varToReduce string, mapper mathexp.ReduceMapper) (*ReduceCommand, error) {
 	_, err := mathexp.GetReduceFunc(reducer)
 	if err != nil {
 		return nil, err
@@ -114,10 +114,11 @@ func UnmarshalReduceCommand(rn *rawNode) (*ReduceCommand, error) {
 	if !ok {
 		return nil, errors.New("no reducer specified")
 	}
-	redFunc, ok := rawReducer.(string)
+	redString, ok := rawReducer.(string)
 	if !ok {
 		return nil, fmt.Errorf("expected reducer to be a string, got %T", rawReducer)
 	}
+	redFunc := mathexp.ReducerID(strings.ToLower(redString))
 
 	var mapper mathexp.ReduceMapper = nil
 	settings, ok := rn.Query["settings"]
@@ -163,7 +164,7 @@ func (gr *ReduceCommand) Execute(ctx context.Context, _ time.Time, vars mathexp.
 	_, span := tracer.Start(ctx, "SSE.ExecuteReduce")
 	defer span.End()
 
-	span.SetAttributes(attribute.String("reducer", gr.Reducer))
+	span.SetAttributes(attribute.String("reducer", string(gr.Reducer)))
 
 	newRes := mathexp.Results{}
 	for i, val := range vars[gr.VarToReduce].Values {
@@ -210,7 +210,7 @@ func TestReduceExecute(t *testing.T) {
 	})
 }
 
-func randomReduceFunc() string {
+func randomReduceFunc() mathexp.ReducerID {
 	res := mathexp.GetSupportedReduceFuncs()
 	return res[rand.Intn(len(res))]
 }
@@ -7,6 +7,7 @@ import (
 	"github.com/stretchr/testify/require"
 
 	"github.com/grafana/grafana/pkg/services/datasources"
+	"github.com/grafana/grafana/pkg/services/featuremgmt"
 )
 
 func TestServicebuildPipeLine(t *testing.T) {
@@ -231,7 +232,9 @@ func TestServicebuildPipeLine(t *testing.T) {
 			expectedOrder: []string{"B", "A"},
 		},
 	}
-	s := Service{}
+	s := Service{
+		features: featuremgmt.WithFeatures(featuremgmt.FlagExpressionParser),
+	}
 	for _, tt := range tests {
 		t.Run(tt.name, func(t *testing.T) {
 			nodes, err := s.buildPipeline(tt.req)
@@ -3,13 +3,30 @@ package mathexp
 import (
 	"fmt"
 	"math"
-	"strings"
 
 	"github.com/grafana/grafana-plugin-sdk-go/data"
 )
 
 type ReducerFunc = func(fv *Float64Field) *float64
 
+// The reducer function
+// +enum
+type ReducerID string
+
+const (
+	ReducerSum   ReducerID = "sum"
+	ReducerMean  ReducerID = "mean"
+	ReducerMin   ReducerID = "min"
+	ReducerMax   ReducerID = "max"
+	ReducerCount ReducerID = "count"
+	ReducerLast  ReducerID = "last"
+)
+
+// GetSupportedReduceFuncs returns collection of supported function names
+func GetSupportedReduceFuncs() []ReducerID {
+	return []ReducerID{ReducerSum, ReducerMean, ReducerMin, ReducerMax, ReducerCount, ReducerLast}
+}
+
 func Sum(fv *Float64Field) *float64 {
 	var sum float64
 	for i := 0; i < fv.Len(); i++ {
@@ -81,34 +98,29 @@ func Last(fv *Float64Field) *float64 {
 	return fv.GetValue(fv.Len() - 1)
 }
 
-func GetReduceFunc(rFunc string) (ReducerFunc, error) {
-	switch strings.ToLower(rFunc) {
-	case "sum":
+func GetReduceFunc(rFunc ReducerID) (ReducerFunc, error) {
+	switch rFunc {
+	case ReducerSum:
 		return Sum, nil
-	case "mean":
+	case ReducerMean:
 		return Avg, nil
-	case "min":
+	case ReducerMin:
 		return Min, nil
-	case "max":
+	case ReducerMax:
 		return Max, nil
-	case "count":
+	case ReducerCount:
 		return Count, nil
-	case "last":
+	case ReducerLast:
 		return Last, nil
 	default:
 		return nil, fmt.Errorf("reduction %v not implemented", rFunc)
 	}
 }
 
-// GetSupportedReduceFuncs returns collection of supported function names
-func GetSupportedReduceFuncs() []string {
-	return []string{"sum", "mean", "min", "max", "count", "last"}
-}
-
 // Reduce turns the Series into a Number based on the given reduction function
 // if ReduceMapper is defined it applies it to the provided series and performs reduction of the resulting series.
 // Otherwise, the reduction operation is done against the original series.
-func (s Series) Reduce(refID, rFunc string, mapper ReduceMapper) (Number, error) {
+func (s Series) Reduce(refID string, rFunc ReducerID, mapper ReduceMapper) (Number, error) {
 	var l data.Labels
 	if s.GetLabels() != nil {
 		l = s.GetLabels().Copy()
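For illustration only (not part of this commit): a minimal sketch of the typed reducer API above. GetReduceFunc now takes a mathexp.ReducerID rather than a free-form string, so lookups outside the enumerated values are rejected by the same switch that maps IDs to functions.

package main

import (
	"fmt"

	"github.com/grafana/grafana/pkg/expr/mathexp"
)

func main() {
	// Known reducers resolve to a ReducerFunc.
	if _, err := mathexp.GetReduceFunc(mathexp.ReducerMax); err != nil {
		fmt.Println("unexpected:", err)
	}

	// Arbitrary strings must be converted explicitly, and unknown values
	// still fail at lookup time.
	if _, err := mathexp.GetReduceFunc(mathexp.ReducerID("median")); err != nil {
		fmt.Println(err) // reduction median not implemented
	}

	// The supported set is now a typed slice rather than []string.
	fmt.Println(mathexp.GetSupportedReduceFuncs()) // [sum mean min max count last]
}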
@@ -30,7 +30,7 @@ var seriesEmpty = Vars{
 func TestSeriesReduce(t *testing.T) {
 	var tests = []struct {
 		name        string
-		red         string
+		red         ReducerID
 		vars        Vars
 		varToReduce string
 		errIs       require.ErrorAssertionFunc
@@ -217,7 +217,7 @@ var seriesNonNumbers = Vars{
 func TestSeriesReduceDropNN(t *testing.T) {
 	var tests = []struct {
 		name        string
-		red         string
+		red         ReducerID
 		vars        Vars
 		varToReduce string
 		results     Results
@@ -304,7 +304,7 @@ func TestSeriesReduceReplaceNN(t *testing.T) {
 	replaceWith := rand.Float64()
 	var tests = []struct {
 		name        string
-		red         string
+		red         ReducerID
 		vars        Vars
 		varToReduce string
 		results     Results
pkg/expr/models.go (new file, 94 lines)
@@ -0,0 +1,94 @@
+package expr
+
+import (
+	"github.com/grafana/grafana/pkg/expr/classic"
+	"github.com/grafana/grafana/pkg/expr/mathexp"
+)
+
+// Supported expression types
+// +enum
+type QueryType string
+
+const (
+	// Apply a mathematical expression to results
+	QueryTypeMath QueryType = "math"
+
+	// Reduce query results
+	QueryTypeReduce QueryType = "reduce"
+
+	// Resample query results
+	QueryTypeResample QueryType = "resample"
+
+	// Classic query
+	QueryTypeClassic QueryType = "classic_conditions"
+
+	// Threshold
+	QueryTypeThreshold QueryType = "threshold"
+)
+
+type MathQuery struct {
+	// General math expression
+	Expression string `json:"expression" jsonschema:"minLength=1,example=$A + 1,example=$A/$B"`
+}
+
+type ReduceQuery struct {
+	// Reference to single query result
+	Expression string `json:"expression" jsonschema:"minLength=1,example=$A"`
+
+	// The reducer
+	Reducer mathexp.ReducerID `json:"reducer"`
+
+	// Reducer Options
+	Settings *ReduceSettings `json:"settings,omitempty"`
+}
+
+// QueryType = resample
+type ResampleQuery struct {
+	// The math expression
+	Expression string `json:"expression" jsonschema:"minLength=1,example=$A + 1,example=$A"`
+
+	// The time duration
+	Window string `json:"window" jsonschema:"minLength=1,example=1w,example=10m"`
+
+	// The downsample function
+	Downsampler string `json:"downsampler"`
+
+	// The upsample function
+	Upsampler string `json:"upsampler"`
+}
+
+type ThresholdQuery struct {
+	// Reference to single query result
+	Expression string `json:"expression" jsonschema:"minLength=1,example=$A"`
+
+	// Threshold Conditions
+	Conditions []ThresholdConditionJSON `json:"conditions"`
+}
+
+type ClassicQuery struct {
+	Conditions []classic.ConditionJSON `json:"conditions"`
+}
+
+//-------------------------------
+// Non-query commands
+//-------------------------------
+
+type ReduceSettings struct {
+	// Non-number reduce behavior
+	Mode ReduceMode `json:"mode"`
+
+	// Only valid when mode is replace
+	ReplaceWithValue *float64 `json:"replaceWithValue,omitempty"`
+}
+
+// Non-Number behavior mode
+// +enum
+type ReduceMode string
+
+const (
+	// Drop non-numbers
+	ReduceModeDrop ReduceMode = "dropNN"
+
+	// Replace non-numbers
+	ReduceModeReplace ReduceMode = "replaceNN"
+)
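For illustration only (not part of this commit): a small sketch of how the typed models above map to the JSON body of a reduce expression. Field names follow the json tags in pkg/expr/models.go; the surrounding refId/type envelope of a query node is read separately.

package main

import (
	"encoding/json"
	"fmt"

	"github.com/grafana/grafana/pkg/expr"
	"github.com/grafana/grafana/pkg/expr/mathexp"
)

func main() {
	// The body of a reduce expression as it appears inside a query node.
	raw := []byte(`{"expression":"$A","reducer":"mean","settings":{"mode":"dropNN"}}`)

	var q expr.ReduceQuery
	if err := json.Unmarshal(raw, &q); err != nil {
		panic(err)
	}

	fmt.Println(q.Reducer == mathexp.ReducerMean)       // true
	fmt.Println(q.Settings.Mode == expr.ReduceModeDrop) // true
	fmt.Println(q.Settings.ReplaceWithValue == nil)     // true; only used with "replaceNN"
}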
@@ -10,6 +10,8 @@ import (
 
 	"github.com/grafana/grafana-plugin-sdk-go/backend"
 	"github.com/grafana/grafana-plugin-sdk-go/data"
+	jsonitersdk "github.com/grafana/grafana-plugin-sdk-go/data/utils/jsoniter"
+	jsoniter "github.com/json-iterator/go"
 	"go.opentelemetry.io/otel/attribute"
 	"go.opentelemetry.io/otel/codes"
 	"gonum.org/v1/gonum/graph/simple"
@@ -46,14 +48,22 @@ type rawNode struct {
 	idx int64
 }
 
-func GetExpressionCommandType(rawQuery map[string]any) (c CommandType, err error) {
+func getExpressionCommandTypeString(rawQuery map[string]any) (string, error) {
 	rawType, ok := rawQuery["type"]
 	if !ok {
-		return c, errors.New("no expression command type in query")
+		return "", errors.New("no expression command type in query")
 	}
 	typeString, ok := rawType.(string)
 	if !ok {
-		return c, fmt.Errorf("expected expression command type to be a string, got type %T", rawType)
+		return "", fmt.Errorf("expected expression command type to be a string, got type %T", rawType)
+	}
+	return typeString, nil
+}
+
+func GetExpressionCommandType(rawQuery map[string]any) (c CommandType, err error) {
+	typeString, err := getExpressionCommandTypeString(rawQuery)
+	if err != nil {
+		return c, err
 	}
 	return ParseCommandType(typeString)
 }
@@ -111,6 +121,29 @@ func buildCMDNode(rn *rawNode, toggles featuremgmt.FeatureToggles) (*CMDNode, er
 		CMDType: commandType,
 	}
 
+	if toggles.IsEnabledGlobally(featuremgmt.FlagExpressionParser) {
+		rn.QueryType, err = getExpressionCommandTypeString(rn.Query)
+		if err != nil {
+			return nil, err // should not happen because the command was parsed first thing
+		}
+
+		// NOTE: the structure of this is weird now, because it is targeting a structure
+		// where this is actually run in the root loop, however we want to verify the individual
+		// node parsing before changing the full tree parser
+		reader, err := NewExpressionQueryReader(toggles)
+		if err != nil {
+			return nil, err
+		}
+
+		iter := jsoniter.ParseBytes(jsoniter.ConfigDefault, rn.QueryRaw)
+		q, err := reader.ReadQuery(rn, jsonitersdk.NewIterator(iter))
+		if err != nil {
+			return nil, err
+		}
+		node.Command = q.Command
+		return node, err
+	}
+
 	switch commandType {
 	case TypeMath:
 		node.Command, err = UnmarshalMathCommand(rn)
pkg/expr/reader.go (new file, 156 lines)
@@ -0,0 +1,156 @@
+package expr
+
+import (
+	"fmt"
+	"strings"
+
+	"github.com/grafana/grafana-plugin-sdk-go/data/utils/jsoniter"
+
+	"github.com/grafana/grafana/pkg/expr/classic"
+	"github.com/grafana/grafana/pkg/expr/mathexp"
+	"github.com/grafana/grafana/pkg/services/featuremgmt"
+)
+
+// Once we are comfortable with the parsing logic, this struct will
+// be merged/replace the existing Query struct in grafana/pkg/expr/transform.go
+type ExpressionQuery struct {
+	RefID   string
+	Command Command
+}
+
+type ExpressionQueryReader struct {
+	features featuremgmt.FeatureToggles
+}
+
+func NewExpressionQueryReader(features featuremgmt.FeatureToggles) (*ExpressionQueryReader, error) {
+	h := &ExpressionQueryReader{
+		features: features,
+	}
+	return h, nil
+}
+
+// ReadQuery implements query.TypedQueryHandler.
+func (h *ExpressionQueryReader) ReadQuery(
+	// Properties that have been parsed off the same node
+	common *rawNode, // common query.CommonQueryProperties
+	// An iterator with context for the full node (include common values)
+	iter *jsoniter.Iterator,
+) (eq ExpressionQuery, err error) {
+	referenceVar := ""
+	eq.RefID = common.RefID
+	qt := QueryType(common.QueryType)
+	switch qt {
+	case QueryTypeMath:
+		q := &MathQuery{}
+		err = iter.ReadVal(q)
+		if err == nil {
+			eq.Command, err = NewMathCommand(common.RefID, q.Expression)
+		}
+
+	case QueryTypeReduce:
+		var mapper mathexp.ReduceMapper = nil
+		q := &ReduceQuery{}
+		err = iter.ReadVal(q)
+		if err == nil {
+			referenceVar, err = getReferenceVar(q.Expression, common.RefID)
+		}
+		if err == nil && q.Settings != nil {
+			switch q.Settings.Mode {
+			case ReduceModeDrop:
+				mapper = mathexp.DropNonNumber{}
+			case ReduceModeReplace:
+				if q.Settings.ReplaceWithValue == nil {
+					err = fmt.Errorf("setting replaceWithValue must be specified when mode is '%s'", q.Settings.Mode)
+				}
+				mapper = mathexp.ReplaceNonNumberWithValue{Value: *q.Settings.ReplaceWithValue}
+			default:
+				err = fmt.Errorf("unsupported reduce mode")
+			}
+		}
+		if err == nil {
+			eq.Command, err = NewReduceCommand(common.RefID,
+				q.Reducer, referenceVar, mapper)
+		}
+
+	case QueryTypeResample:
+		q := &ResampleQuery{}
+		err = iter.ReadVal(q)
+		if err == nil && common.TimeRange == nil {
+			err = fmt.Errorf("missing time range in query")
+		}
+		if err == nil {
+			referenceVar, err = getReferenceVar(q.Expression, common.RefID)
+		}
+		if err == nil {
+			// tr := legacydata.NewDataTimeRange(common.TimeRange.From, common.TimeRange.To)
+			// AbsoluteTimeRange{
+			//	From: tr.GetFromAsTimeUTC(),
+			//	To:   tr.GetToAsTimeUTC(),
+			// })
+			eq.Command, err = NewResampleCommand(common.RefID,
+				q.Window,
+				referenceVar,
+				q.Downsampler,
+				q.Upsampler,
+				common.TimeRange)
+		}
+
+	case QueryTypeClassic:
+		q := &ClassicQuery{}
+		err = iter.ReadVal(q)
+		if err == nil {
+			eq.Command, err = classic.NewConditionCmd(common.RefID, q.Conditions)
+		}
+
+	case QueryTypeThreshold:
+		q := &ThresholdQuery{}
+		err = iter.ReadVal(q)
+		if err == nil {
+			referenceVar, err = getReferenceVar(q.Expression, common.RefID)
+		}
+		if err == nil {
+			// we only support one condition for now, we might want to turn this into "OR" expressions later
+			if len(q.Conditions) != 1 {
+				return eq, fmt.Errorf("threshold expression requires exactly one condition")
+			}
+			firstCondition := q.Conditions[0]
+
+			threshold, err := NewThresholdCommand(common.RefID, referenceVar, firstCondition.Evaluator.Type, firstCondition.Evaluator.Params)
+			if err != nil {
+				return eq, fmt.Errorf("invalid condition: %w", err)
+			}
+			eq.Command = threshold
+
+			if firstCondition.UnloadEvaluator != nil && h.features.IsEnabledGlobally(featuremgmt.FlagRecoveryThreshold) {
+				unloading, err := NewThresholdCommand(common.RefID, referenceVar, firstCondition.UnloadEvaluator.Type, firstCondition.UnloadEvaluator.Params)
+				unloading.Invert = true
+				if err != nil {
+					return eq, fmt.Errorf("invalid unloadCondition: %w", err)
+				}
+				var d Fingerprints
+				if firstCondition.LoadedDimensions != nil {
+					d, err = FingerprintsFromFrame(firstCondition.LoadedDimensions)
+					if err != nil {
+						return eq, fmt.Errorf("failed to parse loaded dimensions: %w", err)
+					}
+				}
+				eq.Command, err = NewHysteresisCommand(common.RefID, referenceVar, *threshold, *unloading, d)
+				if err != nil {
+					return eq, err
+				}
+			}
+		}
+
+	default:
+		err = fmt.Errorf("unknown query type (%s)", common.QueryType)
+	}
+	return eq, err
+}
+
+func getReferenceVar(exp string, refId string) (string, error) {
+	exp = strings.TrimPrefix(exp, "$")
+	if exp == "" {
+		return "", fmt.Errorf("no variable specified to reference for refId %v", refId)
+	}
+	return exp, nil
+}
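For illustration only (not part of this commit): a rough, package-internal sketch of how the reader is driven, mirroring the wiring buildCMDNode performs when the expressionParser toggle is enabled. The test name and the rawNode literal are assumptions made for the sketch; only the fields ReadQuery actually consults are set.

package expr

import (
	"testing"

	jsonitersdk "github.com/grafana/grafana-plugin-sdk-go/data/utils/jsoniter"
	jsoniter "github.com/json-iterator/go"
	"github.com/stretchr/testify/require"

	"github.com/grafana/grafana/pkg/services/featuremgmt"
)

func TestReadReduceQuerySketch(t *testing.T) {
	reader, err := NewExpressionQueryReader(featuremgmt.WithFeatures(featuremgmt.FlagExpressionParser))
	require.NoError(t, err)

	// Body of a reduce expression node; refId/type are ignored by ReadVal since
	// the query type is taken from the rawNode itself.
	raw := []byte(`{"refId":"B","type":"reduce","expression":"$A","reducer":"last"}`)
	rn := &rawNode{RefID: "B", QueryType: "reduce"}

	iter := jsoniter.ParseBytes(jsoniter.ConfigDefault, raw)
	eq, err := reader.ReadQuery(rn, jsonitersdk.NewIterator(iter))
	require.NoError(t, err)
	require.Equal(t, "B", eq.RefID) // eq.Command holds the resulting ReduceCommand
}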
@@ -1205,6 +1205,13 @@ var (
			Owner:           grafanaAppPlatformSquad,
			RequiresRestart: true,
		},
+		{
+			Name:            "expressionParser",
+			Description:     "Enable new expression parser",
+			Stage:           FeatureStageExperimental,
+			Owner:           grafanaAppPlatformSquad,
+			RequiresRestart: true,
+		},
		{
			Name:        "groupByVariable",
			Description: "Enable groupBy variable support in scenes dashboards",
@@ -161,5 +161,6 @@ nodeGraphDotLayout,experimental,@grafana/observability-traces-and-profiling,fals
 groupToNestedTableTransformation,preview,@grafana/dataviz-squad,false,false,true
 newPDFRendering,experimental,@grafana/sharing-squad,false,false,false
 kubernetesAggregator,experimental,@grafana/grafana-app-platform-squad,false,true,false
+expressionParser,experimental,@grafana/grafana-app-platform-squad,false,true,false
 groupByVariable,experimental,@grafana/dashboards-squad,false,false,false
 alertingUpgradeDryrunOnStart,GA,@grafana/alerting-squad,false,true,false
@@ -655,6 +655,10 @@ const (
 	// Enable grafana aggregator
 	FlagKubernetesAggregator = "kubernetesAggregator"
 
+	// FlagExpressionParser
+	// Enable new expression parser
+	FlagExpressionParser = "expressionParser"
+
 	// FlagGroupByVariable
 	// Enable groupBy variable support in scenes dashboards
 	FlagGroupByVariable = "groupByVariable"
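For illustration only (not part of this commit): the generated flag constant is checked the same way buildCMDNode and the expression reader gate on it; WithFeatures is the in-memory helper the service test above uses.

package main

import (
	"fmt"

	"github.com/grafana/grafana/pkg/services/featuremgmt"
)

func main() {
	// Build an in-memory toggle set with the new flag enabled and query it
	// globally, as buildCMDNode does before taking the typed-parser path.
	features := featuremgmt.WithFeatures(featuremgmt.FlagExpressionParser)
	fmt.Println(features.IsEnabledGlobally(featuremgmt.FlagExpressionParser)) // true
}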
File diff suppressed because it is too large