package loganalytics

import (
	"encoding/json"
	"fmt"
	"math"
	"sort"
	"strconv"
	"strings"
	"time"

	"github.com/grafana/grafana-plugin-sdk-go/data"

	"github.com/grafana/grafana/pkg/tsdb/azuremonitor/kinds/dataquery"
	"github.com/grafana/grafana/pkg/tsdb/azuremonitor/types"
)

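// apiErrorToNotice flattens an Azure Log Analytics API error, including any
// nested details and inner errors, into a single data.Notice. The severity
// defaults to a warning and is upgraded to an error only when an inner error
// reports the severity name "Error".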
func apiErrorToNotice(err *AzureLogAnalyticsAPIError) data.Notice {
	message := []string{}
	severity := data.NoticeSeverityWarning
	if err.Message != nil {
		message = append(message, *err.Message)
	}
	if err.Details != nil && len(*err.Details) > 0 {
		for _, detail := range *err.Details {
			if detail.Message != nil {
				message = append(message, *detail.Message)
			}
			if detail.Innererror != nil {
				if detail.Innererror.Message != nil {
					message = append(message, *detail.Innererror.Message)
				}
				if detail.Innererror.SeverityName != nil && *detail.Innererror.SeverityName == "Error" {
					// Severity names are not documented in the API response format
					// https://docs.microsoft.com/en-us/azure/azure-monitor/logs/api/response-format
					// so assuming either an error or a warning
					severity = data.NoticeSeverityError
				}
			}
		}
	}
	return data.Notice{
		Severity: severity,
		Text:     strings.Join(message, " "),
	}
}

// ResponseTableToFrame converts an AzureResponseTable to a data.Frame.
func ResponseTableToFrame(table *types.AzureResponseTable, refID string, executedQuery string, queryType dataquery.AzureQueryType, resultFormat dataquery.ResultFormat) (*data.Frame, error) {
	if len(table.Rows) == 0 {
		return nil, nil
	}

	converterFrame, err := converterFrameForTable(table, queryType, resultFormat)
	if err != nil {
		return nil, err
	}
	for rowIdx, row := range table.Rows {
		for fieldIdx, field := range row {
			err = converterFrame.Set(fieldIdx, rowIdx, field)
			if err != nil {
				return nil, err
			}
		}
	}

	return converterFrame.Frame, nil
}

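// converterFrameForTable builds a data.FrameInputConverter whose field
// converters match the column types of the response table. For traces queries
// rendered as a trace, the tags and serviceTags columns are converted to the
// key/value JSON shape instead of plain strings.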
func converterFrameForTable(t *types.AzureResponseTable, queryType dataquery.AzureQueryType, resultFormat dataquery.ResultFormat) (*data.FrameInputConverter, error) {
	converters := []data.FieldConverter{}
	colNames := make([]string, len(t.Columns))
	colTypes := make([]string, len(t.Columns)) // for metadata

	for i, col := range t.Columns {
		colNames[i] = col.Name
		colTypes[i] = col.Type
		converter, ok := converterMap[col.Type]
		if !ok {
			return nil, fmt.Errorf("unsupported analytics column type %v", col.Type)
		}
		if queryType == dataquery.AzureQueryTypeAzureTraces && resultFormat == dataquery.ResultFormatTrace && (col.Name == "serviceTags" || col.Name == "tags") {
			converter = tagsConverter
		}
		converters = append(converters, converter)
	}

	fic, err := data.NewFrameInputConverter(converters, len(t.Rows))
	if err != nil {
		return nil, err
	}

	err = fic.Frame.SetFieldNames(colNames...)
	if err != nil {
		return nil, err
	}

	fic.Frame.Meta = &data.FrameMeta{
		Custom: &LogAnalyticsMeta{ColumnTypes: colTypes},
	}

	return fic, nil
}

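// converterMap maps the column type names returned by the Log Analytics API
// to field converters. Several Kusto types with no direct dataframe
// equivalent (guid, timespan, dynamic) are kept as strings.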
var converterMap = map[string]data.FieldConverter{
	"string":   stringConverter,
	"guid":     stringConverter,
	"timespan": stringConverter,
	"dynamic":  stringConverter,
	"object":   objectToStringConverter,
	"datetime": timeConverter,
	"int":      intConverter,
	"long":     longConverter,
	"real":     realConverter,
	"bool":     boolConverter,
	"decimal":  decimalConverter,
	"integer":  intConverter,
	"number":   decimalConverter,
}

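// KeyValue is the key/value pair shape that Grafana's trace visualization
// expects for tag fields.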
type KeyValue struct {
	Value interface{} `json:"value"`
	Key   string      `json:"key"`
}

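// tagsConverter parses a JSON-encoded tags column into a sorted array of
// KeyValue pairs, dropping nil, empty-string, and zero-valued entries.
// For example (illustrative input, not from a real response), the column
// value `{"region":"westus","retries":0,"host":""}` becomes
// `[{"key":"region","value":"westus"}]`.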
var tagsConverter = data.FieldConverter{
	OutputFieldType: data.FieldTypeNullableJSON,
	Converter: func(v interface{}) (interface{}, error) {
		if v == nil {
			return nil, nil
		}

		s, ok := v.(string)
		if !ok {
			return nil, fmt.Errorf("unexpected type, expected string but got %T", v)
		}

		m := map[string]any{}
		err := json.Unmarshal([]byte(s), &m)
		if err != nil {
			return nil, fmt.Errorf("failed to unmarshal trace tags: %s", err)
		}

		parsedTags := make([]*KeyValue, 0, len(m))
		for k, v := range m {
			if v == nil {
				continue
			}

			// Skip zero and empty values. The type switch binds the concrete
			// value so the comparison happens on float64/string rather than
			// on the interface, which would never match.
			switch val := v.(type) {
			case float64:
				if val == 0 {
					continue
				}
			case string:
				if val == "" {
					continue
				}
			}

			parsedTags = append(parsedTags, &KeyValue{Key: k, Value: v})
		}
		sort.Slice(parsedTags, func(i, j int) bool {
			return parsedTags[i].Key < parsedTags[j].Key
		})

		marshalledTags, err := json.Marshal(parsedTags)
		if err != nil {
			return nil, fmt.Errorf("failed to marshal parsed trace tags: %s", err)
		}

		jsonTags := json.RawMessage(marshalledTags)

		return &jsonTags, nil
	},
}

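// stringConverter passes string column values through as nullable strings.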
var stringConverter = data.FieldConverter{
	OutputFieldType: data.FieldTypeNullableString,
	Converter: func(v interface{}) (interface{}, error) {
		var as *string
		if v == nil {
			return as, nil
		}
		s, ok := v.(string)
		if !ok {
			return nil, fmt.Errorf("unexpected type, expected string but got %T", v)
		}
		as = &s
		return as, nil
	},
}

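// objectToStringConverter serializes an arbitrary column value back to its
// JSON text so it can be stored in a string field.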
var objectToStringConverter = data.FieldConverter{
	OutputFieldType: data.FieldTypeNullableString,
	Converter: func(kustoValue interface{}) (interface{}, error) {
		var output *string
		if kustoValue == nil {
			return output, nil
		}

		b, err := json.Marshal(kustoValue)
		if err != nil {
			return nil, fmt.Errorf("failed to marshal column value: %s", err)
		}

		asString := string(b)
		output = &asString

		return output, nil
	},
}

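// timeConverter parses datetime column values, which the API returns as
// RFC 3339 strings with up to nanosecond precision.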
var timeConverter = data.FieldConverter{
	OutputFieldType: data.FieldTypeNullableTime,
	Converter: func(v interface{}) (interface{}, error) {
		var at *time.Time
		if v == nil {
			return at, nil
		}
		s, ok := v.(string)
		if !ok {
			return nil, fmt.Errorf("unexpected type, expected string but got %T", v)
		}
		t, err := time.Parse(time.RFC3339Nano, s)
		if err != nil {
			return nil, err
		}

		return &t, nil
	},
}

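// realConverter converts real columns to nullable float64 values. Besides
// json.Number, it accepts the literal strings "Infinity", "-Infinity", and
// "NaN", which Kusto uses to encode non-finite real values.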
var realConverter = data.FieldConverter{
	OutputFieldType: data.FieldTypeNullableFloat64,
	Converter: func(v interface{}) (interface{}, error) {
		var af *float64
		if v == nil {
			return af, nil
		}
		jN, ok := v.(json.Number)
		if !ok {
			s, sOk := v.(string)
			if sOk {
				switch s {
				case "Infinity":
					f := math.Inf(1)
					return &f, nil
				case "-Infinity":
					f := math.Inf(-1)
					return &f, nil
				case "NaN":
					f := math.NaN()
					return &f, nil
				}
			}
			return nil, fmt.Errorf("unexpected type, expected json.Number but got type %T for value %v", v, v)
		}
		f, err := jN.Float64()
		if err != nil {
			return nil, err
		}
		return &f, nil
	},
}

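// boolConverter converts bool column values to nullable booleans.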
var boolConverter = data.FieldConverter{
	OutputFieldType: data.FieldTypeNullableBool,
	Converter: func(v interface{}) (interface{}, error) {
		var ab *bool
		if v == nil {
			return ab, nil
		}
		b, ok := v.(bool)
		if !ok {
			return nil, fmt.Errorf("unexpected type, expected bool but got %T", v)
		}
		return &b, nil
	},
}

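// intConverter converts int column values, delivered as json.Number, to
// nullable int32 values.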
var intConverter = data.FieldConverter{
	OutputFieldType: data.FieldTypeNullableInt32,
	Converter: func(v interface{}) (interface{}, error) {
		var ai *int32
		if v == nil {
			return ai, nil
		}
		jN, ok := v.(json.Number)
		if !ok {
			return nil, fmt.Errorf("unexpected type, expected json.Number but got %T", v)
		}
		iv, err := strconv.ParseInt(jN.String(), 10, 32)
		if err != nil {
			return nil, err
		}
		aInt := int32(iv)
		return &aInt, nil
	},
}

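// longConverter converts long column values, delivered as json.Number, to
// nullable int64 values.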
var longConverter = data.FieldConverter{
	OutputFieldType: data.FieldTypeNullableInt64,
	Converter: func(v interface{}) (interface{}, error) {
		var ai *int64
		if v == nil {
			return ai, nil
		}
		jN, ok := v.(json.Number)
		if !ok {
			return nil, fmt.Errorf("unexpected type, expected json.Number but got %T", v)
		}
		out, err := jN.Int64()
		if err != nil {
			return nil, err
		}
		return &out, nil
	},
}

// decimalConverter converts the Kusto 128-bit decimal type to a float64. We
// do not have 128-bit numbers in our dataframe model yet (and even if we did,
// it is not clear how JavaScript would handle them). In the future we may
// want to revisit storing this with proper precision, but for now it solves
// the case of people getting an error response. If we were to keep it a
// string, it would not work correctly with calls to functions like the SDK's
// data.LongToWide.
var decimalConverter = data.FieldConverter{
	OutputFieldType: data.FieldTypeNullableFloat64,
	Converter: func(v interface{}) (interface{}, error) {
		var af *float64
		if v == nil {
			return af, nil
		}

		jS, sOk := v.(string)
		if sOk {
			out, err := strconv.ParseFloat(jS, 64)
			if err != nil {
				return nil, err
			}
			return &out, nil
		}

		// As far as we can tell the value always arrives as a string, but the
		// equivalent ADX code also handles json.Number, so keep this path in
		// case a value does come through as a number.
		jN, nOk := v.(json.Number)
		if !nOk {
			return nil, fmt.Errorf("unexpected type, expected json.Number or string but got type %T with a value of %v", v, v)
		}
		out, err := jN.Float64() // Float64 calls strconv.ParseFloat
		if err != nil {
			return nil, err
		}
		return &out, nil
	},
}
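
// A minimal usage sketch (illustrative only; the response struct shape, the
// variable names, and the enum values shown are assumptions, not defined in
// this file). The numeric converters above expect json.Number, so the HTTP
// response body must be decoded with UseNumber enabled:
//
//	dec := json.NewDecoder(resp.Body)
//	dec.UseNumber() // numeric columns arrive as json.Number, not float64
//	var res struct {
//		Tables []types.AzureResponseTable `json:"tables"`
//	}
//	if err := dec.Decode(&res); err != nil {
//		return nil, err
//	}
//	frame, err := ResponseTableToFrame(&res.Tables[0], refID, executedQuery,
//		dataquery.AzureQueryTypeAzureLogAnalytics, dataquery.ResultFormatTable)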