Azure Monitor: Log Analytics response to data frames (#25297)
Co-authored-by: Ryan McKinley <ryantxu@gmail.com>
@@ -6,6 +6,7 @@ import {
   DataQuery,
   DataSourceJsonData,
   ScopedVars,
+  DataFrame,
 } from '@grafana/data';
 import { Observable, from, of } from 'rxjs';
 import { config } from '..';
@@ -109,16 +110,34 @@ export class DataSourceWithBackend<
       requestId,
     })
       .then((rsp: any) => {
-        return toDataQueryResponse(rsp);
+        const dqs = toDataQueryResponse(rsp);
+        if (this.processResponse) {
+          return this.processResponse(dqs);
+        }
+        return dqs;
       })
       .catch(err => {
         err.isHandled = true; // Avoid extra popup warning
-        return toDataQueryResponse(err);
+        const dqs = toDataQueryResponse(err);
+        if (this.processResponse) {
+          return this.processResponse(dqs);
+        }
+        return dqs;
       });

     return from(req);
   }

+  /**
+   * Optionally augment the response before returning the results to the caller.
+   */
+  processResponse?(res: DataQueryResponse): Promise<DataQueryResponse>;
+
+  /**
+   * Optionally process the results for display
+   */
+  processDataFrameResult?(frame: DataFrame, idx: number): Promise<DataFrame>;
+
   /**
    * Override to skip executing a query
    *
@@ -4,7 +4,6 @@ import (
 	"bytes"
 	"compress/gzip"
 	"context"
-	"encoding/base64"
 	"encoding/json"
 	"errors"
 	"fmt"
@@ -12,10 +11,9 @@ import (
 	"net/http"
 	"net/url"
 	"path"
-	"time"

+	"github.com/grafana/grafana-plugin-sdk-go/data"
 	"github.com/grafana/grafana/pkg/api/pluginproxy"
-	"github.com/grafana/grafana/pkg/components/null"
 	"github.com/grafana/grafana/pkg/components/simplejson"
 	"github.com/grafana/grafana/pkg/models"
 	"github.com/grafana/grafana/pkg/plugins"
@@ -58,11 +56,7 @@ func (e *AzureLogAnalyticsDatasource) executeTimeSeriesQuery(ctx context.Context
 	}

 	for _, query := range queries {
-		queryRes, err := e.executeQuery(ctx, query, originalQueries, timeRange)
-		if err != nil {
-			queryRes.Error = err
-		}
-		result.Results[query.RefID] = queryRes
+		result.Results[query.RefID] = e.executeQuery(ctx, query, originalQueries, timeRange)
 	}

 	return result, nil
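
Note: in the hunk above, executeQuery now returns a *tsdb.QueryResult with any failure embedded in it instead of a (result, error) pair, so one failing query no longer aborts the whole request. A minimal, self-contained Go sketch of that pattern (the types and names below are illustrative stand-ins, not code from this commit):

    package main

    import "fmt"

    type queryResult struct {
        refID string
        err   error
    }

    // executeOne stores a failure on the result instead of returning it,
    // so sibling queries in the same request still produce data.
    func executeOne(refID string) *queryResult {
        res := &queryResult{refID: refID}
        if refID == "B" {
            res.err = fmt.Errorf("query %s failed", refID)
        }
        return res
    }

    func main() {
        results := map[string]*queryResult{}
        for _, id := range []string{"A", "B", "C"} {
            results[id] = executeOne(id)
        }
        fmt.Println(results["B"].err) // query B failed; A and C are unaffected
    }
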
@@ -115,13 +109,17 @@ func (e *AzureLogAnalyticsDatasource) buildQueries(queries []*tsdb.Query, timeRa
 	return azureLogAnalyticsQueries, nil
 }

-func (e *AzureLogAnalyticsDatasource) executeQuery(ctx context.Context, query *AzureLogAnalyticsQuery, queries []*tsdb.Query, timeRange *tsdb.TimeRange) (*tsdb.QueryResult, error) {
+func (e *AzureLogAnalyticsDatasource) executeQuery(ctx context.Context, query *AzureLogAnalyticsQuery, queries []*tsdb.Query, timeRange *tsdb.TimeRange) *tsdb.QueryResult {
 	queryResult := &tsdb.QueryResult{Meta: simplejson.New(), RefId: query.RefID}

+	queryResultError := func(err error) *tsdb.QueryResult {
+		queryResult.Error = err
+		return queryResult
+	}
+
 	req, err := e.createRequest(ctx, e.dsInfo)
 	if err != nil {
-		queryResult.Error = err
-		return queryResult, nil
+		return queryResultError(err)
 	}

 	req.URL.Path = path.Join(req.URL.Path, query.URL)
@@ -140,38 +138,52 @@ func (e *AzureLogAnalyticsDatasource) executeQuery(ctx context.Context, query *A
 		span.Context(),
 		opentracing.HTTPHeaders,
 		opentracing.HTTPHeadersCarrier(req.Header)); err != nil {
-		queryResult.Error = err
-		return queryResult, nil
+		return queryResultError(err)
 	}

 	azlog.Debug("AzureLogAnalytics", "Request ApiURL", req.URL.String())
 	res, err := ctxhttp.Do(ctx, e.httpClient, req)
 	if err != nil {
-		queryResult.Error = err
-		return queryResult, nil
+		return queryResultError(err)
 	}

-	data, err := e.unmarshalResponse(res)
+	logResponse, err := e.unmarshalResponse(res)
 	if err != nil {
-		queryResult.Error = err
-		return queryResult, nil
+		return queryResultError(err)
 	}

-	azlog.Debug("AzureLogsAnalytics", "Response", queryResult)
-
-	if query.ResultFormat == "table" {
-		queryResult.Tables, queryResult.Meta, err = e.parseToTables(data, query.Model, query.Params)
-		if err != nil {
-			return nil, err
-		}
-	} else {
-		queryResult.Series, queryResult.Meta, err = e.parseToTimeSeries(data, query.Model, query.Params)
-		if err != nil {
-			return nil, err
-		}
+	t, err := logResponse.GetPrimaryResultTable()
+	if err != nil {
+		return queryResultError(err)
 	}

-	return queryResult, nil
+	frame, err := LogTableToFrame(t)
+	if err != nil {
+		return queryResultError(err)
+	}
+
+	setAdditionalFrameMeta(frame,
+		query.Params.Get("query"),
+		query.Model.Get("subscriptionId").MustString(),
+		query.Model.Get("azureLogAnalytics").Get("workspace").MustString())
+
+	if query.ResultFormat == "time_series" {
+		tsSchema := frame.TimeSeriesSchema()
+		if tsSchema.Type == data.TimeSeriesTypeLong {
+			wideFrame, err := data.LongToWide(frame, &data.FillMissing{})
+			if err == nil {
+				frame = wideFrame
+			} else {
+				frame.AppendNotices(data.Notice{Severity: data.NoticeSeverityWarning, Text: "could not convert frame to time series, returning raw table: " + err.Error()})
+			}
+		}
+	}
+
+	frames := data.Frames{frame}
+	queryResult.Dataframes, err = frames.MarshalArrow()
+	if err != nil {
+		return queryResultError(err)
+	}
+	return queryResult
 }

 func (e *AzureLogAnalyticsDatasource) createRequest(ctx context.Context, dsInfo *models.DataSource) (*http.Request, error) {
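
Note: the time_series branch above leans on the plugin SDK's long-to-wide conversion. A runnable sketch of that call in isolation, assuming the same grafana-plugin-sdk-go data API the commit imports (TimeSeriesSchema, LongToWide, FillMissing):

    package main

    import (
        "fmt"
        "time"

        "github.com/grafana/grafana-plugin-sdk-go/data"
    )

    func main() {
        t0 := time.Date(2020, 4, 19, 19, 16, 6, 0, time.UTC)
        // A "long" frame: one time column, one string label column, one value column.
        long := data.NewFrame("",
            data.NewField("TimeGenerated", nil, []time.Time{t0, t0, t0.Add(time.Minute), t0.Add(time.Minute)}),
            data.NewField("Computer", nil, []string{"vm-a", "vm-b", "vm-a", "vm-b"}),
            data.NewField("avg_CounterValue", nil, []float64{1.1, 2.2, 3.3, 4.4}),
        )

        if long.TimeSeriesSchema().Type == data.TimeSeriesTypeLong {
            wide, err := data.LongToWide(long, &data.FillMissing{})
            if err != nil {
                // The commit keeps the long frame and attaches a warning notice instead.
                fmt.Println("conversion failed:", err)
                return
            }
            // One time field plus one avg_CounterValue field per Computer value.
            fmt.Println(wide.Rows(), len(wide.Fields)) // 2 3
        }
    }
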
@@ -225,6 +237,17 @@ func (e *AzureLogAnalyticsDatasource) getPluginRoute(plugin *plugins.DataSourceP
 	return logAnalyticsRoute, pluginRouteName, nil
 }

+// GetPrimaryResultTable returns the first table in the response named "PrimaryResult", or an
+// error if there is no table by that name.
+func (ar *AzureLogAnalyticsResponse) GetPrimaryResultTable() (*AzureLogAnalyticsTable, error) {
+	for _, t := range ar.Tables {
+		if t.Name == "PrimaryResult" {
+			return &t, nil
+		}
+	}
+	return nil, fmt.Errorf("no data as PrimaryResult table is missing from the response")
+}
+
 func (e *AzureLogAnalyticsDatasource) unmarshalResponse(res *http.Response) (AzureLogAnalyticsResponse, error) {
 	body, err := ioutil.ReadAll(res.Body)
 	defer res.Body.Close()
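
Note: GetPrimaryResultTable returns a pointer into the response it searches. A small stand-alone sketch of the same lookup on stand-in types (not the commit's types), mainly to show why returning &t is safe here: the function returns immediately, so the address of the loop variable never outlives the matching iteration.

    package main

    import "fmt"

    type table struct{ Name string }
    type response struct{ Tables []table }

    func (r *response) primaryResult() (*table, error) {
        for _, t := range r.Tables {
            if t.Name == "PrimaryResult" {
                // Safe: we return right away, so &t is not reused by a later iteration.
                return &t, nil
            }
        }
        return nil, fmt.Errorf("no PrimaryResult table in response")
    }

    func main() {
        r := &response{Tables: []table{{Name: "Summary"}, {Name: "PrimaryResult"}}}
        t, err := r.primaryResult()
        fmt.Println(t.Name, err) // PrimaryResult <nil>
    }
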
@@ -239,7 +262,9 @@ func (e *AzureLogAnalyticsDatasource) unmarshalResponse(res *http.Response) (Azu
 	}

 	var data AzureLogAnalyticsResponse
-	err = json.Unmarshal(body, &data)
+	d := json.NewDecoder(bytes.NewReader(body))
+	d.UseNumber()
+	err = d.Decode(&data)
 	if err != nil {
 		azlog.Debug("Failed to unmarshal Azure Log Analytics response", "error", err, "status", res.Status, "body", string(body))
 		return AzureLogAnalyticsResponse{}, err
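
Note: the switch to a decoder with UseNumber matters for Kusto's 64-bit "long" columns. Plain json.Unmarshal decodes every JSON number into float64, which silently rounds large integers; json.Number keeps the original text so the frame converters can parse it losslessly. A runnable sketch using only the standard library:

    package main

    import (
        "encoding/json"
        "fmt"
        "strings"
    )

    func main() {
        const body = `{"v": 9223372036854775807}`

        // Default decoding: the value arrives as float64 and is rounded.
        var lossy map[string]interface{}
        _ = json.Unmarshal([]byte(body), &lossy)
        fmt.Printf("%.0f\n", lossy["v"].(float64)) // 9223372036854775808 (rounded)

        // UseNumber decoding: the value arrives as json.Number, parsed exactly.
        d := json.NewDecoder(strings.NewReader(body))
        d.UseNumber()
        var exact map[string]interface{}
        _ = d.Decode(&exact)
        n, _ := exact["v"].(json.Number).Int64()
        fmt.Println(n) // 9223372036854775807 (exact)
    }
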
@@ -248,153 +273,29 @@ func (e *AzureLogAnalyticsDatasource) unmarshalResponse(res *http.Response) (Azu
 	return data, nil
 }

-func (e *AzureLogAnalyticsDatasource) parseToTables(data AzureLogAnalyticsResponse, model *simplejson.Json, params url.Values) ([]*tsdb.Table, *simplejson.Json, error) {
-	meta, err := createMetadata(model, params)
-	if err != nil {
-		return nil, simplejson.NewFromAny(meta), err
+func setAdditionalFrameMeta(frame *data.Frame, query, subscriptionID, workspace string) {
+	frame.Meta.ExecutedQueryString = query
+	frame.Meta.Custom["subscription"] = subscriptionID
+	frame.Meta.Custom["workspace"] = workspace
+	encodedQuery, err := encodeQuery(query)
+	if err == nil {
+		frame.Meta.Custom["encodedQuery"] = encodedQuery
+		return
 	}
-
-	tables := make([]*tsdb.Table, 0)
-	for _, t := range data.Tables {
-		if t.Name == "PrimaryResult" {
-			table := tsdb.Table{
-				Columns: make([]tsdb.TableColumn, 0),
-				Rows:    make([]tsdb.RowValues, 0),
-			}
-
-			meta.Columns = make([]column, 0)
-			for _, v := range t.Columns {
-				meta.Columns = append(meta.Columns, column{Name: v.Name, Type: v.Type})
-				table.Columns = append(table.Columns, tsdb.TableColumn{Text: v.Name})
-			}
-
-			for _, r := range t.Rows {
-				values := make([]interface{}, len(table.Columns))
-				for i := 0; i < len(table.Columns); i++ {
-					values[i] = r[i]
-				}
-				table.Rows = append(table.Rows, values)
-			}
-			tables = append(tables, &table)
-			return tables, simplejson.NewFromAny(meta), nil
-		}
-	}
-
-	return nil, nil, errors.New("no data as no PrimaryResult table was returned in the response")
+	azlog.Error("failed to encode the query into the encodedQuery property")
 }

-func (e *AzureLogAnalyticsDatasource) parseToTimeSeries(data AzureLogAnalyticsResponse, model *simplejson.Json, params url.Values) (tsdb.TimeSeriesSlice, *simplejson.Json, error) {
-	meta, err := createMetadata(model, params)
-	if err != nil {
-		return nil, simplejson.NewFromAny(meta), err
-	}
-
-	for _, t := range data.Tables {
-		if t.Name == "PrimaryResult" {
-			timeIndex, metricIndex, valueIndex := -1, -1, -1
-			meta.Columns = make([]column, 0)
-			for i, v := range t.Columns {
-				meta.Columns = append(meta.Columns, column{Name: v.Name, Type: v.Type})
-
-				if timeIndex == -1 && v.Type == "datetime" {
-					timeIndex = i
-				}
-
-				if metricIndex == -1 && v.Type == "string" {
-					metricIndex = i
-				}
-
-				if valueIndex == -1 && (v.Type == "int" || v.Type == "long" || v.Type == "real" || v.Type == "double") {
-					valueIndex = i
-				}
-			}
-
-			if timeIndex == -1 {
-				azlog.Info("No time column specified. Returning existing columns, no data")
-				return nil, simplejson.NewFromAny(meta), nil
-			}
-
-			if valueIndex == -1 {
-				azlog.Info("No value column specified. Returning existing columns, no data")
-				return nil, simplejson.NewFromAny(meta), nil
-			}
-
-			slice := tsdb.TimeSeriesSlice{}
-			buckets := map[string]*tsdb.TimeSeriesPoints{}
-
-			getSeriesBucket := func(metricName string) *tsdb.TimeSeriesPoints {
-				if points, ok := buckets[metricName]; ok {
-					return points
-				}
-
-				series := tsdb.NewTimeSeries(metricName, []tsdb.TimePoint{})
-				slice = append(slice, series)
-				buckets[metricName] = &series.Points
-
-				return &series.Points
-			}
-
-			for _, r := range t.Rows {
-				timeStr, ok := r[timeIndex].(string)
-				if !ok {
-					return nil, simplejson.NewFromAny(meta), errors.New("invalid time value")
-				}
-				timeValue, err := time.Parse(time.RFC3339Nano, timeStr)
-				if err != nil {
-					return nil, simplejson.NewFromAny(meta), err
-				}
-
-				var value float64
-				if value, err = getFloat(r[valueIndex]); err != nil {
-					return nil, simplejson.NewFromAny(meta), err
-				}
-
-				var metricName string
-				if metricIndex == -1 {
-					metricName = t.Columns[valueIndex].Name
-				} else {
-					metricName, ok = r[metricIndex].(string)
-					if !ok {
-						return nil, simplejson.NewFromAny(meta), err
-					}
-				}
-
-				points := getSeriesBucket(metricName)
-				*points = append(*points, tsdb.NewTimePoint(null.FloatFrom(value), float64(timeValue.Unix()*1000)))
-			}
-
-			return slice, simplejson.NewFromAny(meta), nil
-		}
-	}
-
-	return nil, nil, errors.New("no data as no PrimaryResult table was returned in the response")
-}
-
-func createMetadata(model *simplejson.Json, params url.Values) (metadata, error) {
-	meta := metadata{
-		Query:        params.Get("query"),
-		Subscription: model.Get("subscriptionId").MustString(),
-		Workspace:    model.Get("azureLogAnalytics").Get("workspace").MustString(),
-	}
-
-	encQuery, err := encodeQuery(meta.Query)
-	if err != nil {
-		return meta, err
-	}
-	meta.EncodedQuery = encQuery
-	return meta, nil
-}
-
-func encodeQuery(rawQuery string) (string, error) {
+// encodeQuery encodes the query in gzip so the frontend can build links.
+func encodeQuery(rawQuery string) ([]byte, error) {
 	var b bytes.Buffer
 	gz := gzip.NewWriter(&b)
 	if _, err := gz.Write([]byte(rawQuery)); err != nil {
-		return "", err
+		return nil, err
 	}

 	if err := gz.Close(); err != nil {
-		return "", err
+		return nil, err
 	}

-	return base64.StdEncoding.EncodeToString(b.Bytes()), nil
+	return b.Bytes(), nil
 }
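
Note: encodeQuery now returns raw gzip bytes instead of a base64 string. When those bytes are later serialized as JSON (for example inside frame metadata), encoding/json base64-encodes []byte automatically, which is presumably why the explicit base64 step was dropped. A runnable round-trip sketch, using only the standard library:

    package main

    import (
        "bytes"
        "compress/gzip"
        "encoding/json"
        "fmt"
        "io/ioutil"
    )

    func encodeQuery(rawQuery string) ([]byte, error) {
        var b bytes.Buffer
        gz := gzip.NewWriter(&b)
        if _, err := gz.Write([]byte(rawQuery)); err != nil {
            return nil, err
        }
        if err := gz.Close(); err != nil {
            return nil, err
        }
        return b.Bytes(), nil
    }

    func main() {
        enc, _ := encodeQuery("test query")

        // Marshaling []byte yields a base64 string, matching the encodedQuery
        // values seen in the old test fixtures.
        j, _ := json.Marshal(map[string]interface{}{"encodedQuery": enc})
        fmt.Println(string(j))

        // Reversing it: gunzip the raw bytes.
        r, _ := gzip.NewReader(bytes.NewReader(enc))
        plain, _ := ioutil.ReadAll(r)
        fmt.Println(string(plain)) // test query
    }
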

@@ -1,17 +1,13 @@
 package azuremonitor

 import (
-	"encoding/json"
 	"fmt"
-	"io/ioutil"
 	"net/url"
-	"path/filepath"
 	"testing"
 	"time"

 	"github.com/google/go-cmp/cmp"
 	"github.com/google/go-cmp/cmp/cmpopts"
-	"github.com/grafana/grafana/pkg/components/null"
 	"github.com/grafana/grafana/pkg/components/simplejson"
 	"github.com/grafana/grafana/pkg/models"
 	"github.com/grafana/grafana/pkg/plugins"
@@ -83,235 +79,6 @@ func TestBuildingAzureLogAnalyticsQueries(t *testing.T) {
 	}
 }

-func TestParsingAzureLogAnalyticsResponses(t *testing.T) {
-	datasource := &AzureLogAnalyticsDatasource{}
-	tests := []struct {
-		name     string
-		testFile string
-		query    string
-		series   tsdb.TimeSeriesSlice
-		meta     string
-		Err      require.ErrorAssertionFunc
-	}{
-		{
-			name:     "Response with single series should be parsed into the Grafana time series format",
-			testFile: "loganalytics/1-log-analytics-response-metrics-single-series.json",
-			query:    "test query",
-			series: tsdb.TimeSeriesSlice{
-				&tsdb.TimeSeries{
-					Name: "grafana-vm",
-					Points: tsdb.TimeSeriesPoints{
-						{null.FloatFrom(1.1), null.FloatFrom(1587323766000)},
-						{null.FloatFrom(2.2), null.FloatFrom(1587323776000)},
-						{null.FloatFrom(3.3), null.FloatFrom(1587323786000)},
-					},
-				},
-			},
-			meta: `{"columns":[{"name":"TimeGenerated","type":"datetime"},{"name":"Computer","type":"string"},{"name":"avg_CounterValue","type":"real"}],"subscription":"1234","workspace":"aworkspace","query":"test query","encodedQuery":"H4sIAAAAAAAA/ypJLS5RKCxNLaoEBAAA///0rBfVCgAAAA=="}`,
-			Err:  require.NoError,
-		},
-		{
-			name:     "Response with multiple series should be parsed into the Grafana time series format",
-			testFile: "loganalytics/2-log-analytics-response-metrics-multiple-series.json",
-			query:    "test query",
-			series: tsdb.TimeSeriesSlice{
-				&tsdb.TimeSeries{
-					Name: "Processor",
-					Points: tsdb.TimeSeriesPoints{
-						{null.FloatFrom(0.75), null.FloatFrom(1587418800000)},
-						{null.FloatFrom(1.0055555555555555), null.FloatFrom(1587419100000)},
-						{null.FloatFrom(0.7407407407407407), null.FloatFrom(1587419400000)},
-					},
-				},
-				&tsdb.TimeSeries{
-					Name: "Logical Disk",
-					Points: tsdb.TimeSeriesPoints{
-						{null.FloatFrom(16090.551851851851), null.FloatFrom(1587418800000)},
-						{null.FloatFrom(16090.537037037036), null.FloatFrom(1587419100000)},
-						{null.FloatFrom(16090.586419753086), null.FloatFrom(1587419400000)},
-					},
-				},
-				&tsdb.TimeSeries{
-					Name: "Memory",
-					Points: tsdb.TimeSeriesPoints{
-						{null.FloatFrom(702.0666666666667), null.FloatFrom(1587418800000)},
-						{null.FloatFrom(700.5888888888888), null.FloatFrom(1587419100000)},
-						{null.FloatFrom(703.1111111111111), null.FloatFrom(1587419400000)},
-					},
-				},
-			},
-			meta: `{"columns":[{"name":"TimeGenerated","type":"datetime"},{"name":"ObjectName","type":"string"},{"name":"avg_CounterValue","type":"real"}],"subscription":"1234","workspace":"aworkspace","query":"test query","encodedQuery":"H4sIAAAAAAAA/ypJLS5RKCxNLaoEBAAA///0rBfVCgAAAA=="}`,
-			Err:  require.NoError,
-		},
-		{
-			name:     "Response with no metric name column should use the value column name as the series name",
-			testFile: "loganalytics/3-log-analytics-response-metrics-no-metric-column.json",
-			query:    "test query",
-			series: tsdb.TimeSeriesSlice{
-				&tsdb.TimeSeries{
-					Name: "avg_CounterValue",
-					Points: tsdb.TimeSeriesPoints{
-						{null.FloatFrom(1), null.FloatFrom(1587323766000)},
-						{null.FloatFrom(2), null.FloatFrom(1587323776000)},
-						{null.FloatFrom(3), null.FloatFrom(1587323786000)},
-					},
-				},
-			},
-			meta: `{"columns":[{"name":"TimeGenerated","type":"datetime"},{"name":"avg_CounterValue","type":"int"}],"subscription":"1234","workspace":"aworkspace","query":"test query","encodedQuery":"H4sIAAAAAAAA/ypJLS5RKCxNLaoEBAAA///0rBfVCgAAAA=="}`,
-			Err:  require.NoError,
-		},
-		{
-			name:     "Response with no time column should return no data",
-			testFile: "loganalytics/4-log-analytics-response-metrics-no-time-column.json",
-			query:    "test query",
-			series:   nil,
-			meta:     `{"columns":[{"name":"Computer","type":"string"},{"name":"avg_CounterValue","type":"real"}],"subscription":"1234","workspace":"aworkspace","query":"test query","encodedQuery":"H4sIAAAAAAAA/ypJLS5RKCxNLaoEBAAA///0rBfVCgAAAA=="}`,
-			Err:      require.NoError,
-		},
-		{
-			name:     "Response with no value column should return no data",
-			testFile: "loganalytics/5-log-analytics-response-metrics-no-value-column.json",
-			query:    "test query",
-			series:   nil,
-			meta:     `{"columns":[{"name":"TimeGenerated","type":"datetime"},{"name":"Computer","type":"string"}],"subscription":"1234","workspace":"aworkspace","query":"test query","encodedQuery":"H4sIAAAAAAAA/ypJLS5RKCxNLaoEBAAA///0rBfVCgAAAA=="}`,
-			Err:      require.NoError,
-		},
-	}
-
-	for _, tt := range tests {
-		t.Run(tt.name, func(t *testing.T) {
-			data, _ := loadLogAnalyticsTestFile(tt.testFile)
-
-			model := simplejson.NewFromAny(map[string]interface{}{
-				"subscriptionId": "1234",
-				"azureLogAnalytics": map[string]interface{}{
-					"workspace": "aworkspace",
-				},
-			})
-			params := url.Values{}
-			params.Add("query", tt.query)
-			series, meta, err := datasource.parseToTimeSeries(data, model, params)
-			tt.Err(t, err)
-
-			if diff := cmp.Diff(tt.series, series, cmpopts.EquateNaNs()); diff != "" {
-				t.Errorf("Result mismatch (-want +got):\n%s", diff)
-			}
-
-			json, _ := json.Marshal(meta)
-			cols := string(json)
-
-			if diff := cmp.Diff(tt.meta, cols, cmpopts.EquateNaNs()); diff != "" {
-				t.Errorf("Result mismatch (-want +got):\n%s", diff)
-			}
-		})
-	}
-}
-
-func TestParsingAzureLogAnalyticsTableResponses(t *testing.T) {
-	datasource := &AzureLogAnalyticsDatasource{}
-	tests := []struct {
-		name     string
-		testFile string
-		query    string
-		tables   []*tsdb.Table
-		meta     string
-		Err      require.ErrorAssertionFunc
-	}{
-		{
-			name:     "Table data should be parsed into the table format Response",
-			testFile: "loganalytics/6-log-analytics-response-table.json",
-			query:    "test query",
-			tables: []*tsdb.Table{
-				{
-					Columns: []tsdb.TableColumn{
-						{Text: "TenantId"},
-						{Text: "Computer"},
-						{Text: "ObjectName"},
-						{Text: "CounterName"},
-						{Text: "InstanceName"},
-						{Text: "Min"},
-						{Text: "Max"},
-						{Text: "SampleCount"},
-						{Text: "CounterValue"},
-						{Text: "TimeGenerated"},
-					},
-					Rows: []tsdb.RowValues{
-						{
-							string("a2c1b44e-3e57-4410-b027-6cc0ae6dee67"),
-							string("grafana-vm"),
-							string("Memory"),
-							string("Available MBytes Memory"),
-							string("Memory"),
-							nil,
-							nil,
-							nil,
-							float64(2040),
-							string("2020-04-23T11:46:03.857Z"),
-						},
-						{
-							string("a2c1b44e-3e57-4410-b027-6cc0ae6dee67"),
-							string("grafana-vm"),
-							string("Memory"),
-							string("Available MBytes Memory"),
-							string("Memory"),
-							nil,
-							nil,
-							nil,
-							float64(2066),
-							string("2020-04-23T11:46:13.857Z"),
-						},
-						{
-							string("a2c1b44e-3e57-4410-b027-6cc0ae6dee67"),
-							string("grafana-vm"),
-							string("Memory"),
-							string("Available MBytes Memory"),
-							string("Memory"),
-							nil,
-							nil,
-							nil,
-							float64(2066),
-							string("2020-04-23T11:46:23.857Z"),
-						},
-					},
-				},
-			},
-			meta: `{"columns":[{"name":"TenantId","type":"string"},{"name":"Computer","type":"string"},{"name":"ObjectName","type":"string"},{"name":"CounterName","type":"string"},` +
-				`{"name":"InstanceName","type":"string"},{"name":"Min","type":"real"},{"name":"Max","type":"real"},{"name":"SampleCount","type":"int"},{"name":"CounterValue","type":"real"},` +
-				`{"name":"TimeGenerated","type":"datetime"}],"subscription":"1234","workspace":"aworkspace","query":"test query","encodedQuery":"H4sIAAAAAAAA/ypJLS5RKCxNLaoEBAAA///0rBfVCgAAAA=="}`,
-			Err: require.NoError,
-		},
-	}
-
-	for _, tt := range tests {
-		t.Run(tt.name, func(t *testing.T) {
-			data, _ := loadLogAnalyticsTestFile(tt.testFile)
-
-			model := simplejson.NewFromAny(map[string]interface{}{
-				"subscriptionId": "1234",
-				"azureLogAnalytics": map[string]interface{}{
-					"workspace": "aworkspace",
-				},
-			})
-			params := url.Values{}
-			params.Add("query", tt.query)
-			tables, meta, err := datasource.parseToTables(data, model, params)
-			tt.Err(t, err)
-
-			if diff := cmp.Diff(tt.tables, tables, cmpopts.EquateNaNs()); diff != "" {
-				t.Errorf("Result mismatch (-want +got):\n%s", diff)
-			}
-
-			json, _ := json.Marshal(meta)
-			cols := string(json)
-
-			if diff := cmp.Diff(tt.meta, cols, cmpopts.EquateNaNs()); diff != "" {
-				t.Errorf("Result mismatch (-want +got):\n%s", diff)
-			}
-		})
-	}
-}
-
 func TestPluginRoutes(t *testing.T) {
 	datasource := &AzureLogAnalyticsDatasource{}
 	plugin := &plugins.DataSourcePlugin{
@@ -389,15 +156,3 @@ func TestPluginRoutes(t *testing.T) {
 		}
 	}
 }
-
-func loadLogAnalyticsTestFile(name string) (AzureLogAnalyticsResponse, error) {
-	var data AzureLogAnalyticsResponse
-
-	path := filepath.Join("testdata", name)
-	jsonBody, err := ioutil.ReadFile(path)
-	if err != nil {
-		return data, err
-	}
-	err = json.Unmarshal(jsonBody, &data)
-	return data, err
-}

New file: pkg/tsdb/azuremonitor/azure-log-analytics-table-frame.go (181 lines)
@@ -0,0 +1,181 @@
+package azuremonitor
+
+import (
+	"encoding/json"
+	"fmt"
+	"strconv"
+	"time"
+
+	"github.com/grafana/grafana-plugin-sdk-go/data"
+)
+
+// LogTableToFrame converts an AzureLogAnalyticsTable to a data.Frame.
+func LogTableToFrame(table *AzureLogAnalyticsTable) (*data.Frame, error) {
+	converterFrame, err := converterFrameForTable(table)
+	if err != nil {
+		return nil, err
+	}
+	for rowIdx, row := range table.Rows {
+		for fieldIdx, field := range row {
+			err = converterFrame.Set(fieldIdx, rowIdx, field)
+			if err != nil {
+				return nil, err
+			}
+		}
+	}
+	return converterFrame.Frame, nil
+}
+
+func converterFrameForTable(t *AzureLogAnalyticsTable) (*data.FrameInputConverter, error) {
+	converters := []data.FieldConverter{}
+	colNames := make([]string, len(t.Columns))
+	colTypes := make([]string, len(t.Columns)) // for metadata
+
+	for i, col := range t.Columns {
+		colNames[i] = col.Name
+		colTypes[i] = col.Type
+		converter, ok := converterMap[col.Type]
+		if !ok {
+			return nil, fmt.Errorf("unsupported analytics column type %v", col.Type)
+		}
+		converters = append(converters, converter)
+	}
+
+	fic, err := data.NewFrameInputConverter(converters, len(t.Rows))
+	if err != nil {
+		return nil, err
+	}
+
+	err = fic.Frame.SetFieldNames(colNames...)
+	if err != nil {
+		return nil, err
+	}
+
+	fic.Frame.Meta = &data.FrameMeta{
+		Custom: map[string]interface{}{"azureColumnTypes": colTypes},
+	}
+
+	return fic, nil
+}
+
+var converterMap = map[string]data.FieldConverter{
+	"string":   stringConverter,
+	"guid":     stringConverter,
+	"timespan": stringConverter,
+	"dynamic":  stringConverter,
+	"datetime": timeConverter,
+	"int":      intConverter,
+	"long":     longConverter,
+	"real":     realConverter,
+	"bool":     boolConverter,
+}
+
+var stringConverter = data.FieldConverter{
+	OutputFieldType: data.FieldTypeNullableString,
+	Converter: func(v interface{}) (interface{}, error) {
+		var as *string
+		if v == nil {
+			return as, nil
+		}
+		s, ok := v.(string)
+		if !ok {
+			return nil, fmt.Errorf("unexpected type, expected string but got %T", v)
+		}
+		as = &s
+		return as, nil
+	},
+}
+
+var timeConverter = data.FieldConverter{
+	OutputFieldType: data.FieldTypeNullableTime,
+	Converter: func(v interface{}) (interface{}, error) {
+		var at *time.Time
+		if v == nil {
+			return at, nil
+		}
+		s, ok := v.(string)
+		if !ok {
+			return nil, fmt.Errorf("unexpected type, expected string but got %T", v)
+		}
+		t, err := time.Parse(time.RFC3339Nano, s)
+		if err != nil {
+			return nil, err
+		}
+
+		return &t, nil
+	},
+}
+
+var realConverter = data.FieldConverter{
+	OutputFieldType: data.FieldTypeNullableFloat64,
+	Converter: func(v interface{}) (interface{}, error) {
+		var af *float64
+		if v == nil {
+			return af, nil
+		}
+		jN, ok := v.(json.Number)
+		if !ok {
+			return nil, fmt.Errorf("unexpected type, expected json.Number but got %T", v)
+		}
+		f, err := jN.Float64()
+		if err != nil {
+			return nil, err
+		}
+		return &f, err
+	},
+}
+
+var boolConverter = data.FieldConverter{
+	OutputFieldType: data.FieldTypeNullableBool,
+	Converter: func(v interface{}) (interface{}, error) {
+		var ab *bool
+		if v == nil {
+			return ab, nil
+		}
+		b, ok := v.(bool)
+		if !ok {
+			return nil, fmt.Errorf("unexpected type, expected bool but got %T", v)
+		}
+		return &b, nil
+	},
+}
+
+var intConverter = data.FieldConverter{
+	OutputFieldType: data.FieldTypeNullableInt32,
+	Converter: func(v interface{}) (interface{}, error) {
+		var ai *int32
+		if v == nil {
+			return ai, nil
+		}
+		jN, ok := v.(json.Number)
+		if !ok {
+			return nil, fmt.Errorf("unexpected type, expected json.Number but got %T", v)
+		}
+		var err error
+		iv, err := strconv.ParseInt(jN.String(), 10, 32)
+		if err != nil {
+			return nil, err
+		}
+		aInt := int32(iv)
+		return &aInt, nil
+	},
+}
+
+var longConverter = data.FieldConverter{
+	OutputFieldType: data.FieldTypeNullableInt64,
+	Converter: func(v interface{}) (interface{}, error) {
+		var ai *int64
+		if v == nil {
+			return ai, nil
+		}
+		jN, ok := v.(json.Number)
+		if !ok {
+			return nil, fmt.Errorf("unexpected type, expected json.Number but got %T", v)
+		}
+		out, err := jN.Int64()
+		if err != nil {
+			return nil, err
+		}
+		return &out, err
+	},
+}
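
Note: the new file above is built around the SDK's FieldConverter/FrameInputConverter pair: converters pre-allocate typed, nullable fields and convert each raw cell on Set. A minimal sketch of that machinery in isolation, assuming the same grafana-plugin-sdk-go data API the file imports:

    package main

    import (
        "fmt"

        "github.com/grafana/grafana-plugin-sdk-go/data"
    )

    func main() {
        toNullableString := data.FieldConverter{
            OutputFieldType: data.FieldTypeNullableString,
            Converter: func(v interface{}) (interface{}, error) {
                if v == nil {
                    var s *string
                    return s, nil // nil survives as a null cell
                }
                s, ok := v.(string)
                if !ok {
                    return nil, fmt.Errorf("expected string, got %T", v)
                }
                return &s, nil
            },
        }

        // One converter per column; row count is known up front.
        fic, err := data.NewFrameInputConverter([]data.FieldConverter{toNullableString}, 2)
        if err != nil {
            panic(err)
        }
        _ = fic.Frame.SetFieldNames("Computer")

        rows := [][]interface{}{{"grafana-vm"}, {nil}}
        for rowIdx, row := range rows {
            for fieldIdx, cell := range row {
                if err := fic.Set(fieldIdx, rowIdx, cell); err != nil {
                    panic(err)
                }
            }
        }
        fmt.Println(fic.Frame.Rows()) // 2
    }
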

New file: pkg/tsdb/azuremonitor/azure-log-analytics-table-frame_test.go (153 lines)
@@ -0,0 +1,153 @@
+package azuremonitor
+
+import (
+	"encoding/json"
+	"os"
+	"path/filepath"
+	"testing"
+	"time"
+
+	"github.com/google/go-cmp/cmp"
+	"github.com/grafana/grafana-plugin-sdk-go/data"
+	"github.com/stretchr/testify/require"
+	"github.com/xorcare/pointer"
+)
+
+func TestLogTableToFrame(t *testing.T) {
+	tests := []struct {
+		name          string
+		testFile      string
+		expectedFrame func() *data.Frame
+	}{
+		{
+			name:     "single series",
+			testFile: "loganalytics/1-log-analytics-response-metrics-single-series.json",
+			expectedFrame: func() *data.Frame {
+				frame := data.NewFrame("",
+					data.NewField("TimeGenerated", nil, []*time.Time{
+						pointer.Time(time.Date(2020, 4, 19, 19, 16, 6, 5e8, time.UTC)),
+						pointer.Time(time.Date(2020, 4, 19, 19, 16, 16, 5e8, time.UTC)),
+						pointer.Time(time.Date(2020, 4, 19, 19, 16, 26, 5e8, time.UTC)),
+					}),
+					data.NewField("Computer", nil, []*string{
+						pointer.String("grafana-vm"),
+						pointer.String("grafana-vm"),
+						pointer.String("grafana-vm"),
+					}),
+					data.NewField("avg_CounterValue", nil, []*float64{
+						pointer.Float64(1.1),
+						pointer.Float64(2.2),
+						pointer.Float64(3.3),
+					}),
+				)
+				frame.Meta = &data.FrameMeta{
+					Custom: map[string]interface{}{"azureColumnTypes": []string{"datetime", "string", "real"}},
+				}
+				return frame
+			},
+		},
+		{
+			name:     "response table",
+			testFile: "loganalytics/6-log-analytics-response-table.json",
+			expectedFrame: func() *data.Frame {
+				frame := data.NewFrame("",
+					data.NewField("TenantId", nil, []*string{
+						pointer.String("a2c1b44e-3e57-4410-b027-6cc0ae6dee67"),
+						pointer.String("a2c1b44e-3e57-4410-b027-6cc0ae6dee67"),
+						pointer.String("a2c1b44e-3e57-4410-b027-6cc0ae6dee67"),
+					}),
+					data.NewField("Computer", nil, []*string{
+						pointer.String("grafana-vm"),
+						pointer.String("grafana-vm"),
+						pointer.String("grafana-vm"),
+					}),
+					data.NewField("ObjectName", nil, []*string{
+						pointer.String("Memory"),
+						pointer.String("Memory"),
+						pointer.String("Memory"),
+					}),
+					data.NewField("CounterName", nil, []*string{
+						pointer.String("Available MBytes Memory"),
+						pointer.String("Available MBytes Memory"),
+						pointer.String("Available MBytes Memory"),
+					}),
+					data.NewField("InstanceName", nil, []*string{
+						pointer.String("Memory"),
+						pointer.String("Memory"),
+						pointer.String("Memory"),
+					}),
+					data.NewField("Min", nil, []*float64{nil, nil, nil}),
+					data.NewField("Max", nil, []*float64{nil, nil, nil}),
+					data.NewField("SampleCount", nil, []*int32{nil, nil, nil}),
+					data.NewField("CounterValue", nil, []*float64{
+						pointer.Float64(2040),
+						pointer.Float64(2066),
+						pointer.Float64(2066),
+					}),
+					data.NewField("TimeGenerated", nil, []*time.Time{
+						pointer.Time(time.Date(2020, 4, 23, 11, 46, 3, 857e6, time.UTC)),
+						pointer.Time(time.Date(2020, 4, 23, 11, 46, 13, 857e6, time.UTC)),
+						pointer.Time(time.Date(2020, 4, 23, 11, 46, 23, 857e6, time.UTC)),
+					}),
+				)
+				frame.Meta = &data.FrameMeta{
+					Custom: map[string]interface{}{"azureColumnTypes": []string{"string", "string", "string",
+						"string", "string", "real", "real", "int", "real", "datetime"}},
+				}
+				return frame
+			},
+		},
+		{
+			name:     "all supported field types",
+			testFile: "loganalytics/7-log-analytics-all-types-table.json",
+			expectedFrame: func() *data.Frame {
+				frame := data.NewFrame("",
+					data.NewField("XBool", nil, []*bool{pointer.Bool(true)}),
+					data.NewField("XString", nil, []*string{pointer.String("Grafana")}),
+					data.NewField("XDateTime", nil, []*time.Time{pointer.Time(time.Date(2006, 1, 2, 22, 4, 5, 1*1e8, time.UTC))}),
+					data.NewField("XDynamic", nil, []*string{pointer.String(`[{"person":"Daniel"},{"cats":23},{"diagnosis":"cat problem"}]`)}),
+					data.NewField("XGuid", nil, []*string{pointer.String("74be27de-1e4e-49d9-b579-fe0b331d3642")}),
+					data.NewField("XInt", nil, []*int32{pointer.Int32(2147483647)}),
+					data.NewField("XLong", nil, []*int64{pointer.Int64(9223372036854775807)}),
+					data.NewField("XReal", nil, []*float64{pointer.Float64(1.797693134862315708145274237317043567981e+308)}),
+					data.NewField("XTimeSpan", nil, []*string{pointer.String("00:00:00.0000001")}),
+				)
+				frame.Meta = &data.FrameMeta{
+					Custom: map[string]interface{}{"azureColumnTypes": []string{"bool", "string", "datetime",
+						"dynamic", "guid", "int", "long", "real", "timespan"}},
+				}
+				return frame
+			},
+		},
+	}
+
+	for _, tt := range tests {
+		t.Run(tt.name, func(t *testing.T) {
+			res, err := loadLogAnalyticsTestFileWithNumber(tt.testFile)
+			require.NoError(t, err)
+			frame, err := LogTableToFrame(&res.Tables[0])
+			require.NoError(t, err)
+
+			if diff := cmp.Diff(tt.expectedFrame(), frame, data.FrameTestCompareOptions()...); diff != "" {
+				t.Errorf("Result mismatch (-want +got):\n%s", diff)
+			}
+		})
+	}
+}
+
+func loadLogAnalyticsTestFileWithNumber(name string) (AzureLogAnalyticsResponse, error) {
+	var data AzureLogAnalyticsResponse
+
+	path := filepath.Join("testdata", name)
+	f, err := os.Open(path)
+	if err != nil {
+		return data, err
+	}
+	defer f.Close()
+	d := json.NewDecoder(f)
+	d.UseNumber()
+	err = d.Decode(&data)
+	return data, err
+}

New file (vendored): pkg/tsdb/azuremonitor/testdata/loganalytics/7-log-analytics-all-types-table.json (59 lines)
@@ -0,0 +1,59 @@
+{
+  "tables": [
+    {
+      "name": "PrimaryResult",
+      "columns": [
+        {
+          "name": "XBool",
+          "type": "bool"
+        },
+        {
+          "name": "XString",
+          "type": "string"
+        },
+        {
+          "name": "XDateTime",
+          "type": "datetime"
+        },
+        {
+          "name": "XDynamic",
+          "type": "dynamic"
+        },
+        {
+          "name": "XGuid",
+          "type": "guid"
+        },
+        {
+          "name": "XInt",
+          "type": "int"
+        },
+        {
+          "name": "XLong",
+          "type": "long"
+        },
+        {
+          "name": "XReal",
+          "type": "real"
+        },
+        {
+          "name": "XTimeSpan",
+          "type": "timespan"
+        }
+      ],
+      "rows": [
+        [
+          true,
+          "Grafana",
+          "2006-01-02T22:04:05.1Z",
+          "[{\"person\":\"Daniel\"},{\"cats\":23},{\"diagnosis\":\"cat problem\"}]",
+          "74be27de-1e4e-49d9-b579-fe0b331d3642",
+          2147483647,
+          9223372036854775807,
+          1.7976931348623157e+308,
+          "00:00:00.0000001"
+        ]
+      ]
+    }
+  ]
+}

@@ -78,19 +78,6 @@ type AzureLogAnalyticsTable struct {
 	Rows [][]interface{} `json:"rows"`
 }

-type metadata struct {
-	Columns      []column `json:"columns"`
-	Subscription string   `json:"subscription"`
-	Workspace    string   `json:"workspace"`
-	Query        string   `json:"query"`
-	EncodedQuery string   `json:"encodedQuery"`
-}
-
-type column struct {
-	Name string `json:"name"`
-	Type string `json:"type"`
-}
-
 // azureMonitorJSONQuery is the frontend JSON query model for an Azure Monitor query.
 type azureMonitorJSONQuery struct {
 	AzureMonitor struct {
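
Note: the metadata and column structs removed above are superseded by frame metadata. A small sketch of the equivalent data in its new home, assuming the SDK's FrameMeta fields this commit uses elsewhere (ExecutedQueryString and the untyped Custom map):

    package main

    import (
        "fmt"

        "github.com/grafana/grafana-plugin-sdk-go/data"
    )

    func main() {
        frame := data.NewFrame("")
        frame.Meta = &data.FrameMeta{
            // Standard field replaces the old Query property.
            ExecutedQueryString: "test query",
            // Azure-specific properties move into Custom.
            Custom: map[string]interface{}{
                "subscription":     "1234",
                "workspace":        "aworkspace",
                "azureColumnTypes": []string{"datetime", "string", "real"},
            },
        }
        fmt.Println(frame.Meta.ExecutedQueryString, frame.Meta.Custom.(map[string]interface{})["workspace"])
    }
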

@@ -3,17 +3,18 @@ import { DataFrame, toUtc, getFrameDisplayName } from '@grafana/data';
 import { TemplateSrv } from 'app/features/templating/template_srv';
 import { backendSrv } from 'app/core/services/backend_srv'; // will use the version in __mocks__

+const templateSrv = new TemplateSrv();
+
 jest.mock('@grafana/runtime', () => ({
   ...jest.requireActual('@grafana/runtime'),
   getBackendSrv: () => backendSrv,
+  getTemplateSrv: () => templateSrv,
 }));

 describe('AppInsightsDatasource', () => {
   const datasourceRequestMock = jest.spyOn(backendSrv, 'datasourceRequest');

-  const ctx: any = {
-    templateSrv: new TemplateSrv(),
-  };
+  const ctx: any = {};

   beforeEach(() => {
     jest.clearAllMocks();
@@ -22,7 +23,7 @@ describe('AppInsightsDatasource', () => {
       url: 'http://appinsightsapi',
     };

-    ctx.ds = new Datasource(ctx.instanceSettings, ctx.templateSrv);
+    ctx.ds = new Datasource(ctx.instanceSettings);
   });

   describe('When performing testDatasource', () => {
@@ -1,7 +1,6 @@
 import { TimeSeries, toDataFrame } from '@grafana/data';
 import { DataQueryRequest, DataQueryResponseData, DataSourceInstanceSettings } from '@grafana/data';
-import { getBackendSrv } from '@grafana/runtime';
-import { TemplateSrv } from 'app/features/templating/template_srv';
+import { getBackendSrv, getTemplateSrv } from '@grafana/runtime';
 import _ from 'lodash';

 import TimegrainConverter from '../time_grain_converter';
@@ -20,8 +19,7 @@ export default class AppInsightsDatasource {
   applicationId: string;
   logAnalyticsColumns: { [key: string]: LogAnalyticsColumn[] } = {};

-  /** @ngInject */
-  constructor(instanceSettings: DataSourceInstanceSettings<AzureDataSourceJsonData>, private templateSrv: TemplateSrv) {
+  constructor(instanceSettings: DataSourceInstanceSettings<AzureDataSourceJsonData>) {
     this.id = instanceSettings.id;
     this.applicationId = instanceSettings.jsonData.appInsightsAppId || '';

@@ -66,7 +64,7 @@ export default class AppInsightsDatasource {
       raw: false,
       appInsights: {
         rawQuery: true,
-        rawQueryString: this.templateSrv.replace(item.rawQueryString, options.scopedVars),
+        rawQueryString: getTemplateSrv().replace(item.rawQueryString, options.scopedVars),
         timeColumn: item.timeColumn,
         valueColumn: item.valueColumn,
         segmentColumn: item.segmentColumn,
@@ -91,17 +89,19 @@ export default class AppInsightsDatasource {
       item.dimensionFilter = item.filter;
     }

+    const templateSrv = getTemplateSrv();
+
     return {
       type: 'timeSeriesQuery',
       raw: false,
       appInsights: {
         rawQuery: false,
-        timeGrain: this.templateSrv.replace((item.timeGrain || '').toString(), options.scopedVars),
+        timeGrain: templateSrv.replace((item.timeGrain || '').toString(), options.scopedVars),
         allowedTimeGrainsMs: item.allowedTimeGrainsMs,
-        metricName: this.templateSrv.replace(item.metricName, options.scopedVars),
-        aggregation: this.templateSrv.replace(item.aggregation, options.scopedVars),
-        dimension: this.templateSrv.replace(item.dimension, options.scopedVars),
-        dimensionFilter: this.templateSrv.replace(item.dimensionFilter, options.scopedVars),
+        metricName: templateSrv.replace(item.metricName, options.scopedVars),
+        aggregation: templateSrv.replace(item.aggregation, options.scopedVars),
+        dimension: templateSrv.replace(item.dimension, options.scopedVars),
+        dimensionFilter: templateSrv.replace(item.dimensionFilter, options.scopedVars),
         alias: item.alias,
         format: target.format,
       },
@@ -198,7 +198,7 @@ export default class AppInsightsDatasource {
     const appInsightsGroupByQuery = query.match(/^AppInsightsGroupBys\(([^\)]+?)(,\s?([^,]+?))?\)/i);
     if (appInsightsGroupByQuery) {
      const metricName = appInsightsGroupByQuery[1];
-      return this.getGroupBys(this.templateSrv.replace(metricName));
+      return this.getGroupBys(getTemplateSrv().replace(metricName));
     }

     return undefined;
@ -2,12 +2,15 @@ import AzureMonitorDatasource from '../datasource';
|
|||||||
import FakeSchemaData from './__mocks__/schema';
|
import FakeSchemaData from './__mocks__/schema';
|
||||||
import { TemplateSrv } from 'app/features/templating/template_srv';
|
import { TemplateSrv } from 'app/features/templating/template_srv';
|
||||||
import { KustoSchema, AzureLogsVariable } from '../types';
|
import { KustoSchema, AzureLogsVariable } from '../types';
|
||||||
import { toUtc, getFrameDisplayName } from '@grafana/data';
|
import { toUtc } from '@grafana/data';
|
||||||
import { backendSrv } from 'app/core/services/backend_srv'; // will use the version in __mocks__
|
import { backendSrv } from 'app/core/services/backend_srv'; // will use the version in __mocks__
|
||||||
|
|
||||||
|
const templateSrv = new TemplateSrv();
|
||||||
|
|
||||||
jest.mock('@grafana/runtime', () => ({
|
jest.mock('@grafana/runtime', () => ({
|
||||||
...jest.requireActual('@grafana/runtime'),
|
...jest.requireActual('@grafana/runtime'),
|
||||||
getBackendSrv: () => backendSrv,
|
getBackendSrv: () => backendSrv,
|
||||||
|
getTemplateSrv: () => templateSrv,
|
||||||
}));
|
}));
|
||||||
|
|
||||||
describe('AzureLogAnalyticsDatasource', () => {
|
describe('AzureLogAnalyticsDatasource', () => {
|
||||||
@ -18,9 +21,7 @@ describe('AzureLogAnalyticsDatasource', () => {
|
|||||||
datasourceRequestMock.mockImplementation(jest.fn());
|
datasourceRequestMock.mockImplementation(jest.fn());
|
||||||
});
|
});
|
||||||
|
|
||||||
const ctx: any = {
|
const ctx: any = {};
|
||||||
templateSrv: new TemplateSrv(),
|
|
||||||
};
|
|
||||||
|
|
||||||
beforeEach(() => {
|
beforeEach(() => {
|
||||||
ctx.instanceSettings = {
|
ctx.instanceSettings = {
|
||||||
@ -28,7 +29,7 @@ describe('AzureLogAnalyticsDatasource', () => {
|
|||||||
url: 'http://azureloganalyticsapi',
|
url: 'http://azureloganalyticsapi',
|
||||||
};
|
};
|
||||||
|
|
||||||
ctx.ds = new AzureMonitorDatasource(ctx.instanceSettings, ctx.templateSrv);
|
ctx.ds = new AzureMonitorDatasource(ctx.instanceSettings);
|
||||||
});
|
});
|
||||||
|
|
||||||
describe('When the config option "Same as Azure Monitor" has been chosen', () => {
|
describe('When the config option "Same as Azure Monitor" has been chosen', () => {
|
||||||
@ -67,7 +68,7 @@ describe('AzureLogAnalyticsDatasource', () => {
|
|||||||
ctx.instanceSettings.jsonData.tenantId = 'xxx';
|
ctx.instanceSettings.jsonData.tenantId = 'xxx';
|
||||||
ctx.instanceSettings.jsonData.clientId = 'xxx';
|
ctx.instanceSettings.jsonData.clientId = 'xxx';
|
||||||
ctx.instanceSettings.jsonData.azureLogAnalyticsSameAs = true;
|
ctx.instanceSettings.jsonData.azureLogAnalyticsSameAs = true;
|
||||||
ctx.ds = new AzureMonitorDatasource(ctx.instanceSettings, ctx.templateSrv);
|
ctx.ds = new AzureMonitorDatasource(ctx.instanceSettings);
|
||||||
|
|
||||||
datasourceRequestMock.mockImplementation((options: { url: string }) => {
|
datasourceRequestMock.mockImplementation((options: { url: string }) => {
|
||||||
if (options.url.indexOf('Microsoft.OperationalInsights/workspaces') > -1) {
|
if (options.url.indexOf('Microsoft.OperationalInsights/workspaces') > -1) {
|
||||||
@ -119,112 +120,6 @@ describe('AzureLogAnalyticsDatasource', () => {
|
|||||||
});
|
});
|
||||||
});
|
});
|
||||||
|
|
||||||
describe('When performing query', () => {
|
|
||||||
const options = {
|
|
||||||
range: {
|
|
||||||
from: toUtc('2017-08-22T20:00:00Z'),
|
|
||||||
to: toUtc('2017-08-22T23:59:00Z'),
|
|
||||||
},
|
|
||||||
rangeRaw: {
|
|
||||||
from: 'now-4h',
|
|
||||||
to: 'now',
|
|
||||||
},
|
|
||||||
targets: [
|
|
||||||
{
|
|
||||||
apiVersion: '2016-09-01',
|
|
||||||
refId: 'A',
|
|
||||||
queryType: 'Azure Log Analytics',
|
|
||||||
azureLogAnalytics: {
|
|
||||||
resultFormat: 'time_series',
|
|
||||||
query:
|
|
||||||
'AzureActivity | where TimeGenerated > ago(2h) ' +
|
|
||||||
'| summarize count() by Category, bin(TimeGenerated, 5min) ' +
|
|
||||||
'| project TimeGenerated, Category, count_ | order by TimeGenerated asc',
|
|
||||||
},
|
|
||||||
},
|
|
||||||
],
|
|
||||||
};
|
|
||||||
|
|
||||||
const response = {
|
|
||||||
results: {
|
|
||||||
A: {
|
|
||||||
refId: 'A',
|
|
||||||
meta: {
|
|
||||||
columns: ['TimeGenerated', 'Computer', 'avg_CounterValue'],
|
|
||||||
subscription: 'xxx',
|
|
||||||
-            workspace: 'aaaa-1111-bbbb-2222',
-            query:
-              'Perf\r\n| where ObjectName == "Memory" and CounterName == "Available MBytes Memory"\n| where TimeGenerated >= datetime(\'2020-04-23T09:15:20Z\') and TimeGenerated <= datetime(\'2020-04-23T09:20:20Z\')\n| where 1 == 1\n| summarize avg(CounterValue) by bin(TimeGenerated, 1m), Computer \n| order by TimeGenerated asc',
-            encodedQuery: 'gzipped_base64_encoded_query',
-          },
-          series: [
-            {
-              name: 'grafana-vm',
-              points: [
-                [2017.25, 1587633300000],
-                [2048, 1587633360000],
-                [2048.3333333333335, 1587633420000],
-                [2049, 1587633480000],
-                [2049, 1587633540000],
-                [2049, 1587633600000],
-              ],
-            },
-          ],
-        },
-      },
-    };
-
-    const workspacesResponse = {
-      value: [
-        {
-          properties: {
-            customerId: 'aaaa-1111-bbbb-2222',
-          },
-          id:
-            '/subscriptions/44693801-6ee6-49de-9b2d-9106972f9572/resourcegroups/defaultresourcegroup/providers/microsoft.operationalinsights/workspaces/aworkspace',
-          name: 'aworkspace',
-          type: 'Microsoft.OperationalInsights/workspaces',
-        },
-      ],
-    };
-
-    describe('in time series format', () => {
-      describe('and the data is valid (has time, metric and value columns)', () => {
-        beforeEach(() => {
-          datasourceRequestMock.mockImplementation((options: { url: string }) => {
-            if (options.url.indexOf('Microsoft.OperationalInsights/workspaces') > 0) {
-              return Promise.resolve({ data: workspacesResponse, status: 200 });
-            } else {
-              expect(options.url).toContain('/api/tsdb/query');
-              return Promise.resolve({ data: response, status: 200 });
-            }
-          });
-        });
-
-        it('should return a list of datapoints', () => {
-          return ctx.ds.query(options).then((results: any) => {
-            expect(results.data.length).toBe(1);
-            expect(getFrameDisplayName(results.data[0])).toEqual('grafana-vm');
-            expect(results.data[0].fields.length).toBe(2);
-            expect(results.data[0].name).toBe('grafana-vm');
-            expect(results.data[0].fields[0].name).toBe('Time');
-            expect(results.data[0].fields[1].name).toBe('Value');
-            expect(results.data[0].fields[0].values.toArray().length).toBe(6);
-            expect(results.data[0].fields[0].values.get(0)).toEqual(1587633300000);
-            expect(results.data[0].fields[1].values.get(0)).toEqual(2017.25);
-            expect(results.data[0].fields[0].values.get(1)).toEqual(1587633360000);
-            expect(results.data[0].fields[1].values.get(1)).toEqual(2048);
-            expect(results.data[0].fields[0].config.links[0].title).toEqual('View in Azure Portal');
-            expect(results.data[0].fields[0].config.links[0].targetBlank).toBe(true);
-            expect(results.data[0].fields[0].config.links[0].url).toEqual(
-              'https://portal.azure.com/#blade/Microsoft_OperationsManagementSuite_Workspace/AnalyticsBlade/initiator/AnalyticsShareLinkToQuery/isQueryEditorVisible/true/scope/%7B%22resources%22%3A%5B%7B%22resourceId%22%3A%22%2Fsubscriptions%2Fxxx%2Fresourcegroups%2Fdefaultresourcegroup%2Fproviders%2Fmicrosoft.operationalinsights%2Fworkspaces%2Faworkspace%22%7D%5D%7D/query/gzipped_base64_encoded_query/isQueryBase64Compressed/true/timespanInIsoFormat/P1D'
-            );
-          });
-        });
-      });
-    });
-  });

   describe('When performing getSchema', () => {
     beforeEach(() => {
       datasourceRequestMock.mockImplementation((options: { url: string }) => {
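
Note: the deletions above remove assertions that were written against data frames built client-side from the legacy /api/tsdb/query payload. For orientation, a minimal sketch (not part of the commit) of that conversion, assuming the toDataFrame helper and TimeSeries type exported by @grafana/data in this release:

import { TimeSeries, toDataFrame } from '@grafana/data';

// A legacy backend series: datapoints are [value, epoch-ms] pairs.
const series: TimeSeries = {
  target: 'grafana-vm',
  datapoints: [
    [2017.25, 1587633300000],
    [2048, 1587633360000],
  ],
};

// Produces a frame named 'grafana-vm' with a 'Time' and a 'Value' field --
// the shape the deleted expectations were checking.
const frame = toDataFrame(series);

With this commit that conversion happens in the Go backend, so the frontend tests no longer exercise it.
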
@@ -2,13 +2,19 @@ import _ from 'lodash';
 import LogAnalyticsQuerystringBuilder from '../log_analytics/querystring_builder';
 import ResponseParser from './response_parser';
 import { AzureMonitorQuery, AzureDataSourceJsonData, AzureLogsVariable } from '../types';
-import { TimeSeries, toDataFrame } from '@grafana/data';
-import { DataQueryRequest, DataQueryResponseData, DataSourceInstanceSettings } from '@grafana/data';
-import { getBackendSrv } from '@grafana/runtime';
-import { TemplateSrv } from 'app/features/templating/template_srv';
+import {
+  DataQueryResponse,
+  ScopedVars,
+  DataSourceInstanceSettings,
+  QueryResultMeta,
+  MetricFindValue,
+} from '@grafana/data';
+import { getBackendSrv, getTemplateSrv, DataSourceWithBackend } from '@grafana/runtime';

-export default class AzureLogAnalyticsDatasource {
-  id: number;
+export default class AzureLogAnalyticsDatasource extends DataSourceWithBackend<
+  AzureMonitorQuery,
+  AzureDataSourceJsonData
+> {
   url: string;
   baseUrl: string;
   applicationId: string;
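
Note: the class now opts into the backend-query flow by extending DataSourceWithBackend. A minimal sketch of the hook surface relied on in the hunks below (hypothetical ExampleQuery/ExampleOptions types; hook names as they appear in this diff, current as of this change):

import {
  DataQuery,
  DataQueryResponse,
  DataSourceInstanceSettings,
  DataSourceJsonData,
  ScopedVars,
} from '@grafana/data';
import { DataSourceWithBackend } from '@grafana/runtime';

interface ExampleQuery extends DataQuery {} // hypothetical
interface ExampleOptions extends DataSourceJsonData {} // hypothetical

class ExampleDatasource extends DataSourceWithBackend<ExampleQuery, ExampleOptions> {
  constructor(instanceSettings: DataSourceInstanceSettings<ExampleOptions>) {
    super(instanceSettings); // query() is inherited and posts the targets to the Go backend
  }

  // Skip queries that should not be sent to the backend.
  filterQuery(query: ExampleQuery): boolean {
    return query.hide !== true;
  }

  // Interpolate variables into the JSON sent to the backend.
  applyTemplateVariables(query: ExampleQuery, scopedVars: ScopedVars): Record<string, any> {
    return { ...query };
  }

  // Optionally post-process the parsed response (the deep links below use this).
  async processResponse(res: DataQueryResponse): Promise<DataQueryResponse> {
    return res;
  }
}
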
@@ -17,12 +23,8 @@ export default class AzureLogAnalyticsDatasource {
   subscriptionId: string;
   cache: Map<string, any>;

-  /** @ngInject */
-  constructor(
-    private instanceSettings: DataSourceInstanceSettings<AzureDataSourceJsonData>,
-    private templateSrv: TemplateSrv
-  ) {
-    this.id = instanceSettings.id;
+  constructor(private instanceSettings: DataSourceInstanceSettings<AzureDataSourceJsonData>) {
+    super(instanceSettings);
     this.cache = new Map();

     switch (this.instanceSettings.jsonData.cloudName) {
@@ -88,7 +90,7 @@ export default class AzureLogAnalyticsDatasource {
   }

   getWorkspaceList(subscription: string): Promise<any> {
-    const subscriptionId = this.templateSrv.replace(subscription || this.subscriptionId);
+    const subscriptionId = getTemplateSrv().replace(subscription || this.subscriptionId);

     const workspaceListUrl =
       this.azureMonitorUrl +
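
Note: the recurring substitution in this file -- the Angular-injected this.templateSrv becomes the getTemplateSrv() accessor from @grafana/runtime -- is what lets the constructor drop its templateSrv parameter. Roughly (a sketch; '$workspace' is an assumed dashboard variable):

import { getTemplateSrv } from '@grafana/runtime';

// Fetch the template service singleton on demand instead of holding an
// injected instance; replace() interpolates dashboard variables.
const interpolated = getTemplateSrv().replace('$workspace');
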
@@ -100,103 +102,70 @@ export default class AzureLogAnalyticsDatasource {
     if (!workspace) {
       return Promise.resolve();
     }
-    const url = `${this.baseUrl}/${this.templateSrv.replace(workspace, {})}/metadata`;
+    const url = `${this.baseUrl}/${getTemplateSrv().replace(workspace, {})}/metadata`;

     return this.doRequest(url).then((response: any) => {
       return new ResponseParser(response.data).parseSchemaResult();
     });
   }

-  async query(options: DataQueryRequest<AzureMonitorQuery>) {
-    const queries = _.filter(options.targets, item => {
-      return item.hide !== true;
-    }).map(target => {
-      const item = target.azureLogAnalytics;
-
-      let workspace = this.templateSrv.replace(item.workspace, options.scopedVars);
-
-      if (!workspace && this.defaultOrFirstWorkspace) {
-        workspace = this.defaultOrFirstWorkspace;
-      }
-
-      const subscriptionId = this.templateSrv.replace(target.subscription || this.subscriptionId, options.scopedVars);
-      const query = this.templateSrv.replace(item.query, options.scopedVars, this.interpolateVariable);
-
-      return {
-        refId: target.refId,
-        intervalMs: options.intervalMs,
-        maxDataPoints: options.maxDataPoints,
-        datasourceId: this.id,
-        format: target.format,
-        queryType: 'Azure Log Analytics',
-        subscriptionId: subscriptionId,
-        azureLogAnalytics: {
-          resultFormat: item.resultFormat,
-          query: query,
-          workspace: workspace,
-        },
-      };
-    });
-
-    if (!queries || queries.length === 0) {
-      return [];
-    }
-
-    const { data } = await getBackendSrv().datasourceRequest({
-      url: '/api/tsdb/query',
-      method: 'POST',
-      data: {
-        from: options.range.from.valueOf().toString(),
-        to: options.range.to.valueOf().toString(),
-        queries,
-      },
-    });
-
-    const result: DataQueryResponseData[] = [];
-    if (data.results) {
-      const results: any[] = Object.values(data.results);
-      for (let queryRes of results) {
-        for (let series of queryRes.series || []) {
-          const timeSeries: TimeSeries = {
-            target: series.name,
-            datapoints: series.points,
-            refId: queryRes.refId,
-            meta: queryRes.meta,
-          };
-          const df = toDataFrame(timeSeries);
-
-          if (queryRes.meta.encodedQuery && queryRes.meta.encodedQuery.length > 0) {
-            const url = await this.buildDeepLink(queryRes);
-
-            if (url.length > 0) {
-              for (const field of df.fields) {
-                field.config.links = [
-                  {
-                    url: url,
-                    title: 'View in Azure Portal',
-                    targetBlank: true,
-                  },
-                ];
-              }
-            }
-          }
-
-          result.push(df);
-        }
-
-        for (let table of queryRes.tables || []) {
-          result.push(toDataFrame(table));
-        }
-      }
-    }
-
-    return result;
+  filterQuery(item: AzureMonitorQuery): boolean {
+    return item.hide !== true && !!item.azureLogAnalytics;
   }

-  private async buildDeepLink(queryRes: any) {
-    const base64Enc = encodeURIComponent(queryRes.meta.encodedQuery);
-    const workspaceId = queryRes.meta.workspace;
-    const subscription = queryRes.meta.subscription;
+  applyTemplateVariables(target: AzureMonitorQuery, scopedVars: ScopedVars): Record<string, any> {
+    const item = target.azureLogAnalytics;
+
+    const templateSrv = getTemplateSrv();
+    let workspace = templateSrv.replace(item.workspace, scopedVars);
+
+    if (!workspace && this.defaultOrFirstWorkspace) {
+      workspace = this.defaultOrFirstWorkspace;
+    }
+
+    const subscriptionId = templateSrv.replace(target.subscription || this.subscriptionId, scopedVars);
+    const query = templateSrv.replace(item.query, scopedVars, this.interpolateVariable);
+
+    return {
+      refId: target.refId,
+      format: target.format,
+      queryType: 'Azure Log Analytics',
+      subscriptionId: subscriptionId,
+      azureLogAnalytics: {
+        resultFormat: item.resultFormat,
+        query: query,
+        workspace: workspace,
+      },
+    };
+  }
+
+  async processResponse(res: DataQueryResponse): Promise<DataQueryResponse> {
+    if (res.data) {
+      for (const df of res.data) {
+        const encodedQuery = df.meta?.custom?.encodedQuery;
+        if (encodedQuery && encodedQuery.length > 0) {
+          const url = await this.buildDeepLink(df.meta);
+          if (url?.length) {
+            for (const field of df.fields) {
+              field.config.links = [
+                {
+                  url: url,
+                  title: 'View in Azure Portal',
+                  targetBlank: true,
+                },
+              ];
+            }
+          }
+        }
+      }
+    }
+    return res;
+  }
+
+  private async buildDeepLink(meta: QueryResultMeta) {
+    const base64Enc = encodeURIComponent(meta.custom.encodedQuery);
+    const workspaceId = meta.custom.workspace;
+    const subscription = meta.custom.subscription;

     const details = await this.getWorkspaceDetails(workspaceId);
     if (!details.workspace || !details.resourceGroup) {
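
Note: the deep-link logic moved out of the old query() loop into the processResponse hook, and its inputs (workspace, subscription, encodedQuery) now travel on the frame's meta.custom, which the backend populates. The portal URL format is visible in the deleted test expectation above; a sketch with a hypothetical buildPortalUrl helper, assuming encodedQuery is already the gzipped, base64-encoded KQL and with the timespan fixed to P1D for brevity:

// Hypothetical helper mirroring the URL shape asserted in the deleted test.
function buildPortalUrl(
  portalUrl: string,
  subscription: string,
  resourceGroup: string,
  workspace: string,
  encodedQuery: string
): string {
  const resourceId = `/subscriptions/${subscription}/resourcegroups/${resourceGroup}/providers/microsoft.operationalinsights/workspaces/${workspace}`;
  const scope = encodeURIComponent(JSON.stringify({ resources: [{ resourceId }] }));
  return (
    `${portalUrl}#blade/Microsoft_OperationsManagementSuite_Workspace/AnalyticsBlade` +
    `/initiator/AnalyticsShareLinkToQuery/isQueryEditorVisible/true` +
    `/scope/${scope}/query/${encodeURIComponent(encodedQuery)}` +
    `/isQueryBase64Compressed/true/timespanInIsoFormat/P1D`
  );
}
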
@@ -235,7 +204,7 @@ export default class AzureLogAnalyticsDatasource {
     };
   }

-  metricFindQuery(query: string) {
+  metricFindQuery(query: string): Promise<MetricFindValue[]> {
     const workspacesQuery = query.match(/^workspaces\(\)/i);
     if (workspacesQuery) {
       return this.getWorkspaces(this.subscriptionId);
@@ -268,12 +237,12 @@ export default class AzureLogAnalyticsDatasource {
             throw { message: err.error.data.error.message };
           }
         });
-    });
+    }) as Promise<MetricFindValue[]>; // ??
   }

   private buildQuery(query: string, options: any, workspace: any) {
     const querystringBuilder = new LogAnalyticsQuerystringBuilder(
-      this.templateSrv.replace(query, {}, this.interpolateVariable),
+      getTemplateSrv().replace(query, {}, this.interpolateVariable),
       options,
       'TimeGenerated'
     );
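
Note: metricFindQuery now advertises Promise<MetricFindValue[]>, and the `as` cast (flagged `// ??` by the author) asserts that the parsed variable results already have that shape rather than proving it to the compiler. For reference, a sketch of the expected shape, assuming the MetricFindValue type from @grafana/data:

import { MetricFindValue } from '@grafana/data';

// Variable queries resolve to text entries (value is optional on the type).
const workspaces: MetricFindValue[] = [{ text: 'aworkspace' }, { text: 'bworkspace' }];
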
@@ -382,10 +351,10 @@ export default class AzureLogAnalyticsDatasource {
     }
   }

-  testDatasource() {
+  testDatasource(): Promise<any> {
     const validationError = this.isValidConfig();
     if (validationError) {
-      return validationError;
+      return Promise.resolve(validationError);
     }

     return this.getDefaultOrFirstWorkspace()
@@ -30,7 +30,7 @@ describe('AzureMonitorDatasource', () => {
       jsonData: { subscriptionId: '9935389e-9122-4ef9-95f9-1513dd24753f' },
       cloudName: 'azuremonitor',
     } as unknown) as DataSourceInstanceSettings<AzureDataSourceJsonData>;
-    ctx.ds = new AzureMonitorDatasource(ctx.instanceSettings, templateSrv);
+    ctx.ds = new AzureMonitorDatasource(ctx.instanceSettings);
   });

   describe('When performing testDatasource', () => {
@@ -10,7 +10,6 @@ import {
   DataQueryResponse,
   DataQueryResponseData,
 } from '@grafana/data';
-import { TemplateSrv } from 'app/features/templating/template_srv';
 import { Observable } from 'rxjs';

 export default class Datasource extends DataSourceApi<AzureMonitorQuery, AzureDataSourceJsonData> {
@@ -18,12 +17,11 @@ export default class Datasource extends DataSourceApi<AzureMonitorQuery, AzureDa
   appInsightsDatasource: AppInsightsDatasource;
   azureLogAnalyticsDatasource: AzureLogAnalyticsDatasource;

-  /** @ngInject */
-  constructor(instanceSettings: DataSourceInstanceSettings<AzureDataSourceJsonData>, private templateSrv: TemplateSrv) {
+  constructor(instanceSettings: DataSourceInstanceSettings<AzureDataSourceJsonData>) {
     super(instanceSettings);
     this.azureMonitorDatasource = new AzureMonitorDatasource(instanceSettings);
-    this.appInsightsDatasource = new AppInsightsDatasource(instanceSettings, this.templateSrv);
-    this.azureLogAnalyticsDatasource = new AzureLogAnalyticsDatasource(instanceSettings, this.templateSrv);
+    this.appInsightsDatasource = new AppInsightsDatasource(instanceSettings);
+    this.azureLogAnalyticsDatasource = new AzureLogAnalyticsDatasource(instanceSettings);
   }

   query(options: DataQueryRequest<AzureMonitorQuery>): Promise<DataQueryResponse> | Observable<DataQueryResponseData> {
@@ -44,10 +42,13 @@ export default class Datasource extends DataSourceApi<AzureMonitorQuery, AzureDa
     }

     if (azureLogAnalyticsOptions.targets.length > 0) {
-      const alaPromise = this.azureLogAnalyticsDatasource.query(azureLogAnalyticsOptions);
-      if (alaPromise) {
-        promises.push(alaPromise);
+      const obs = this.azureLogAnalyticsDatasource.query(azureLogAnalyticsOptions);
+      if (!promises.length) {
+        return obs; // return the observable directly
       }
+      // NOTE: this only includes the data!
+      // When all three query types are ready to be observable, they should all use observables
+      promises.push(obs.toPromise().then(r => r.data));
     }

     if (azureMonitorOptions.targets.length > 0) {
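
Note: after this change the Log Analytics path yields an rxjs Observable (inherited from DataSourceWithBackend) while the other two query types still return promises, so the hunk above bridges the two worlds. A sketch of that bridge in isolation (assumes rxjs 6, where toPromise() resolves with the last emitted value):

import { Observable, of } from 'rxjs';
import { DataQueryResponse } from '@grafana/data';

const obs: Observable<DataQueryResponse> = of({ data: [] });

// Sole active query type: hand the observable straight back to the caller.
// Mixed with others: collapse it to a promise and keep only the frames --
// dropping error/state metadata, the limitation the NOTE comment calls out.
const dataPromise = obs.toPromise().then(r => r.data);
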