mirror of
https://github.com/grafana/grafana.git
synced 2025-08-03 06:22:13 +08:00

* Build out barebones Traces editor - Add Traces query type and operation ID prop to query type - Add necessary header types - Update resource picker to appropriately work with traces query type - Build out TracesQueryEditor component - Include logic to retrieve operationId's for AI Workspaces - Add backend route mapping - Update macro to use timestamp as default time field for traces * AzureMonitor: Traces - Response parsing (#65442) * Update FormatAsField component - Add trace ResultFormat type - Generalise FormatAsField component - Add component to TracesQueryEditor - Remove duplicate code in setQueryValue * Add custom filter function to improve performance * Add basic conversion for logs to trace - Add serviceTags converter - Pass through required parameters (queryType and resultFormat) - Appropriately set visualisation * Update parsing to also fill trace tags - Add constant values for each table schema (include legacy mapping for now if needed) - Add constant for list of table tags - Set the foundation for dynamic query building - Update query to build tags value - Appropriately set operationName - Update tagsConverter to filter empty values * Fix lint and test issues * AzureMonitor: Traces - Data links (#65566) * Add portal link for traces - Pull out necessary values (itemId and itemType) - Appropriately construct - Fix ordering * Set default format as value - Also set default visualisation * Fix event schema * Set default formatAsField value * Include logs link on traces results - Adapt config links to allow custom title to be set * Correctly set operationId for query * Update backend types - Include OperationID in query - Pass forward datasource name and UID * Ensure setTime doesn't consistently get called if operationID is defined * Add explore link - Update util functions to allow setting custom datalinks * Fix tests * AzureMonitor: Traces - Query and Editor updates (#66076) * Add initial query - Will query the resource as soon as a resource has been 
selected - Updates the data links for the query without operationId - Remove initial operationId query and timeRange dependency - Update query building * Add entirely separate traces query property - Update shared types (also including future types for Azure traces) - Update backend log analytics datasource to accept both azureLogAnalytics and azureTraces queries - Update backend specific types - Update frontend datasource for new properties - Update mock query * Update FormatAsField to be entirely generic * Update query building to be done in backend - Add required mappings in backend - Update frontend querying * Fix query and explore data link * Add trace type selection * Better method for setting explore link * Fix operationId updating * Run go mod tidy * Unnecessary changes * Fix tests * AzureMonitor: Traces - Add correlation API support (#65855) Add correlation API support - Add necessary types - Add correlation API request when conditions are met - Update query * Fix property from merge * AzureMonitor: Traces - Filtering (#66303) * Add initial query - Will query the resource as soon as a resource has been selected - Updates the data links for the query without operationId - Remove initial operationId query and timeRange dependency - Update query building * Add entirely separate traces query property - Update shared types (also including future types for Azure traces) - Update backend log analytics datasource to accept both azureLogAnalytics and azureTraces queries - Update backend specific types - Update frontend datasource for new properties - Update mock query * Update FormatAsField to be entirely generic * Update query building to be done in backend - Add required mappings in backend - Update frontend querying * Fix query and explore data link * Add trace type selection * Better method for setting explore link * Fix operationId updating * Run go mod tidy * Unnecessary changes * Fix tests * Start building out Filters component - Configure component to query 
for Filter property values when a filter property is set - Add setFilters function - Add typing to tablesSchema - Use component in TracesQueryEditor * Update Filters - Asynchronously pull property options - Setup list of Filter components * Update filters component - Remove unused imports - Have local filters state and query filters - Correctly set filters values - Don't update query every time a filter property changes (not performant) * Update properties query - Use current timeRange - Get count to provide informative labels * Reset map when time changes * Add operation selection * Reset filters when property changes * Appropriate label name for empty values * Add filtering to query * Update filter components - Fix rendering issue - Correctly compare and update timeRange - Split out files for simplicity * Add checkbox option to multiselect - Add custom option component - Correctly call onChange - Add variableOptionGroup for template variable selection * Fix adding template vars * Improve labels and refresh labels on query prop changes * AzureMonitor: Traces - Testing (#66474) * Select ds for template variable interpolation * Update az logs ds tests - Add templateVariables test - Add filter test - Update mock - Remove anys * Update QueryEditor test - Update mocks with timeSrv for log analytics datasource - Fix query mock - Use appropriate and consistent selectors * Add TracesQueryEditor test - Update resourcePickerRows mock to include app insights resources - Remove comments and extra new line * Add FormatAsField test - Remove unneeded condition * Update resourcePicker utils test * Don't hide selected options in filters * Fix multi-selection on filters * Add TraceTypeField test - Add test file - Update selectors (remove copy/paste mistake) - Update placeholder text for select and add label * Add basic filters test * Begin filters test * Update filters test * Add final tests and simplify/generalise addFilter helper * Minor update to datasource test * Update macros 
test * Update selectors in tests * Add response-table-frame tests * Add datasource tests - Use sorting where JSON models are inconsistent - Update filters clause - Dedupe tags - Correct operationId conditions * Don't set a default value for blurInputOnSelect * Simplify datasource test * Update to use CheckGoldenJSON utils - Update with generated frame files - Remove redundant expected frame code - Update all usages * Fix lint * AzureMonitor: Traces feedback (#67292) * Filter traces if the visualisation is set to trace - Update build query logic - Added additional test cases - Return an error if the traces type is set by itself with the trace visualisation - Add descriptions to event types - Update tests * Fix bug for error displaying traces * Update mappings and add error field - Update tests - Remove unnecessary comments * Switch location of Operation ID field * Re-order fields * Update link title * Update label for event type selection * Update correct link title * Update logs datalink to link to Azure Logs in explore * Fix lint
169 lines
5.5 KiB
Go
169 lines
5.5 KiB
Go
package macros
|
|
|
|
import (
|
|
"fmt"
|
|
"regexp"
|
|
"strings"
|
|
"time"
|
|
|
|
"github.com/grafana/grafana-plugin-sdk-go/backend"
|
|
|
|
"github.com/grafana/grafana/pkg/components/simplejson"
|
|
"github.com/grafana/grafana/pkg/infra/log"
|
|
"github.com/grafana/grafana/pkg/services/datasources"
|
|
"github.com/grafana/grafana/pkg/tsdb/azuremonitor/kinds/dataquery"
|
|
"github.com/grafana/grafana/pkg/tsdb/azuremonitor/types"
|
|
"github.com/grafana/grafana/pkg/tsdb/legacydata/interval"
|
|
)
|
|
|
|
const rsIdentifier = `__(timeFilter|timeFrom|timeTo|interval|contains|escapeMulti)`
|
|
const sExpr = `\$` + rsIdentifier + `(?:\(([^\)]*)\))?`
|
|
const escapeMultiExpr = `\$__escapeMulti\(('.*')\)`
|
|
|
|
// kqlMacroEngine expands Grafana macros ($__timeFilter, $__interval, ...)
// embedded in a KQL query string.
type kqlMacroEngine struct {
	// timeRange is the query's time range, consumed by the time-based macros.
	timeRange backend.TimeRange
	// query is the originating data query; its Interval and JSON model feed
	// the $__interval macro.
	query backend.DataQuery
}
// Macros:
|
|
// - $__timeFilter() -> timestamp >= datetime(2018-06-05T18:09:58.907Z) and timestamp <= datetime(2018-06-05T20:09:58.907Z)
// - $__timeFilter(datetimeColumn) -> datetimeColumn >= datetime(2018-06-05T18:09:58.907Z) and datetimeColumn <= datetime(2018-06-05T20:09:58.907Z)
|
|
// - $__timeFrom() -> datetime(2018-06-05T18:09:58.907Z)
// - $__timeTo() -> datetime(2018-06-05T20:09:58.907Z)
|
|
// - $__interval -> 5m
|
|
// - $__contains(col, 'val1','val2') -> col in ('val1', 'val2')
|
|
// - $__escapeMulti('\\vm\eth0\Total','\\vm\eth2\Total') -> @'\\vm\eth0\Total',@'\\vm\eth2\Total'
|
|
|
|
// KqlInterpolate interpolates macros for Kusto Query Language (KQL) queries
|
|
func KqlInterpolate(logger log.Logger, query backend.DataQuery, dsInfo types.DatasourceInfo, kql string, defaultTimeField ...string) (string, error) {
|
|
engine := kqlMacroEngine{}
|
|
|
|
defaultTimeFieldForAllDatasources := "timestamp"
|
|
if len(defaultTimeField) > 0 && query.QueryType != string(dataquery.AzureQueryTypeAzureTraces) {
|
|
defaultTimeFieldForAllDatasources = defaultTimeField[0]
|
|
}
|
|
return engine.Interpolate(logger, query, dsInfo, kql, defaultTimeFieldForAllDatasources)
|
|
}
|
|
|
|
func (m *kqlMacroEngine) Interpolate(logger log.Logger, query backend.DataQuery, dsInfo types.DatasourceInfo, kql string, defaultTimeField string) (string, error) {
|
|
m.timeRange = query.TimeRange
|
|
m.query = query
|
|
rExp, _ := regexp.Compile(sExpr)
|
|
escapeMultiRegex, _ := regexp.Compile(escapeMultiExpr)
|
|
|
|
var macroError error
|
|
|
|
// First pass for the escapeMulti macro
|
|
kql = m.ReplaceAllStringSubmatchFunc(escapeMultiRegex, kql, func(groups []string) string {
|
|
args := []string{}
|
|
|
|
if len(groups) > 1 {
|
|
args = strings.Split(groups[1], "','")
|
|
}
|
|
|
|
expr := strings.Join(args, "', @'")
|
|
return fmt.Sprintf("@%s", expr)
|
|
})
|
|
|
|
// second pass for all the other macros
|
|
kql = m.ReplaceAllStringSubmatchFunc(rExp, kql, func(groups []string) string {
|
|
args := []string{}
|
|
if len(groups) > 2 {
|
|
args = strings.Split(groups[2], ",")
|
|
}
|
|
|
|
for i, arg := range args {
|
|
args[i] = strings.Trim(arg, " ")
|
|
}
|
|
res, err := m.evaluateMacro(logger, groups[1], defaultTimeField, args, dsInfo)
|
|
if err != nil && macroError == nil {
|
|
macroError = err
|
|
return "macro_error()"
|
|
}
|
|
return res
|
|
})
|
|
|
|
if macroError != nil {
|
|
return "", macroError
|
|
}
|
|
|
|
return kql, nil
|
|
}
|
|
|
|
func (m *kqlMacroEngine) evaluateMacro(logger log.Logger, name string, defaultTimeField string, args []string, dsInfo types.DatasourceInfo) (string, error) {
|
|
switch name {
|
|
case "timeFilter":
|
|
timeColumn := defaultTimeField
|
|
if len(args) > 0 && args[0] != "" {
|
|
timeColumn = args[0]
|
|
}
|
|
return fmt.Sprintf("['%s'] >= datetime('%s') and ['%s'] <= datetime('%s')", timeColumn,
|
|
m.timeRange.From.UTC().Format(time.RFC3339), timeColumn,
|
|
m.timeRange.To.UTC().Format(time.RFC3339)), nil
|
|
case "timeFrom", "__from":
|
|
return fmt.Sprintf("datetime('%s')", m.timeRange.From.UTC().Format(time.RFC3339)), nil
|
|
case "timeTo", "__to":
|
|
return fmt.Sprintf("datetime('%s')", m.timeRange.To.UTC().Format(time.RFC3339)), nil
|
|
case "interval":
|
|
var it time.Duration
|
|
if m.query.Interval.Milliseconds() == 0 {
|
|
to := m.timeRange.To.UnixNano()
|
|
from := m.timeRange.From.UnixNano()
|
|
// default to "100 datapoints" if nothing in the query is more specific
|
|
defaultInterval := time.Duration((to - from) / 60)
|
|
model, err := simplejson.NewJson(m.query.JSON)
|
|
if err != nil {
|
|
logger.Warn("Unable to parse model from query", "JSON", m.query.JSON)
|
|
it = defaultInterval
|
|
} else {
|
|
it, err = interval.GetIntervalFrom(&datasources.DataSource{
|
|
JsonData: simplejson.NewFromAny(dsInfo.JSONData),
|
|
}, model, defaultInterval)
|
|
if err != nil {
|
|
logger.Warn("Unable to get interval from query", "model", model)
|
|
it = defaultInterval
|
|
}
|
|
}
|
|
} else {
|
|
it = time.Millisecond * time.Duration(m.query.Interval.Milliseconds())
|
|
}
|
|
return fmt.Sprintf("%dms", int(it/time.Millisecond)), nil
|
|
case "contains":
|
|
if len(args) < 2 || args[0] == "" || args[1] == "" {
|
|
return "", fmt.Errorf("macro %v needs colName and variableSet", name)
|
|
}
|
|
|
|
if args[1] == "all" {
|
|
return "1 == 1", nil
|
|
}
|
|
|
|
expression := strings.Join(args[1:], ",")
|
|
return fmt.Sprintf("['%s'] in (%s)", args[0], expression), nil
|
|
case "escapeMulti":
|
|
return "", fmt.Errorf("escapeMulti macro not formatted correctly")
|
|
default:
|
|
return "", fmt.Errorf("unknown macro %q", name)
|
|
}
|
|
}
|
|
|
|
func (m *kqlMacroEngine) ReplaceAllStringSubmatchFunc(re *regexp.Regexp, str string, repl func([]string) string) string {
|
|
result := ""
|
|
lastIndex := 0
|
|
|
|
for _, v := range re.FindAllSubmatchIndex([]byte(str), -1) {
|
|
groups := []string{}
|
|
for i := 0; i < len(v); i += 2 {
|
|
if v[i] < 0 {
|
|
groups = append(groups, "")
|
|
} else {
|
|
groups = append(groups, str[v[i]:v[i+1]])
|
|
}
|
|
}
|
|
|
|
result += str[lastIndex:v[0]] + repl(groups)
|
|
lastIndex = v[1]
|
|
}
|
|
|
|
return result + str[lastIndex:]
|
|
}
|