feat(testdata): lots of work on new testdata data source and scenarios

This commit is contained in:
Torkel Ödegaard
2016-09-27 18:17:39 +02:00
parent ade8aa5b92
commit 3ecd96e682
16 changed files with 257 additions and 123 deletions

View File

@ -244,7 +244,8 @@ func Register(r *macaron.Macaron) {
r.Get("/search/", Search) r.Get("/search/", Search)
// metrics // metrics
r.Get("/metrics/test", wrap(GetTestMetrics)) r.Post("/tsdb/query", bind(dtos.MetricRequest{}), wrap(QueryMetrics))
r.Get("/tsdb/testdata/scenarios", wrap(GetTestDataScenarios))
// metrics // metrics
r.Get("/metrics", wrap(GetInternalMetrics)) r.Get("/metrics", wrap(GetInternalMetrics))

View File

@ -96,8 +96,10 @@ func (slice DataSourceList) Swap(i, j int) {
slice[i], slice[j] = slice[j], slice[i] slice[i], slice[j] = slice[j], slice[i]
} }
type MetricQueryResultDto struct { type MetricRequest struct {
Data []interface{} `json:"data"` From string `json:"from"`
To string `json:"to"`
Queries []*simplejson.Json `json:"queries"`
} }
type UserStars struct { type UserStars struct {

View File

@ -8,43 +8,47 @@ import (
"github.com/grafana/grafana/pkg/metrics" "github.com/grafana/grafana/pkg/metrics"
"github.com/grafana/grafana/pkg/middleware" "github.com/grafana/grafana/pkg/middleware"
"github.com/grafana/grafana/pkg/tsdb" "github.com/grafana/grafana/pkg/tsdb"
"github.com/grafana/grafana/pkg/tsdb/testdata"
"github.com/grafana/grafana/pkg/util" "github.com/grafana/grafana/pkg/util"
) )
func GetTestMetrics(c *middleware.Context) Response { // POST /api/tsdb/query
func QueryMetrics(c *middleware.Context, reqDto dtos.MetricRequest) Response {
timeRange := tsdb.NewTimeRange(reqDto.From, reqDto.To)
timeRange := tsdb.NewTimeRange(c.Query("from"), c.Query("to")) request := &tsdb.Request{TimeRange: timeRange}
req := &tsdb.Request{ for _, query := range reqDto.Queries {
TimeRange: timeRange, request.Queries = append(request.Queries, &tsdb.Query{
Queries: []*tsdb.Query{ RefId: query.Get("refId").MustString("A"),
{ MaxDataPoints: query.Get("maxDataPoints").MustInt64(100),
RefId: "A", IntervalMs: query.Get("intervalMs").MustInt64(1000),
MaxDataPoints: c.QueryInt64("maxDataPoints"), Model: query,
IntervalMs: c.QueryInt64("intervalMs"), DataSource: &tsdb.DataSourceInfo{
DataSource: &tsdb.DataSourceInfo{ Name: "Grafana TestDataDB",
Name: "Grafana TestDataDB", PluginId: "grafana-testdata-datasource",
PluginId: "grafana-testdata-datasource",
},
}, },
}, })
} }
resp, err := tsdb.HandleRequest(req) resp, err := tsdb.HandleRequest(request)
if err != nil { if err != nil {
return ApiError(500, "Metric request error", err) return ApiError(500, "Metric request error", err)
} }
result := dtos.MetricQueryResultDto{} return Json(200, &resp)
}
for _, v := range resp.Results { // GET /api/tsdb/testdata/scenarios
if v.Error != nil { func GetTestDataScenarios(c *middleware.Context) Response {
return ApiError(500, "tsdb.HandleRequest() response error", v.Error) result := make([]interface{}, 0)
}
for _, series := range v.Series { for _, scenario := range testdata.ScenarioRegistry {
result.Data = append(result.Data, series) result = append(result, map[string]interface{}{
} "id": scenario.Id,
"name": scenario.Name,
"description": scenario.Description,
})
} }
return Json(200, &result) return Json(200, &result)

View File

@ -69,7 +69,7 @@ func (c *QueryCondition) Eval(context *alerting.EvalContext) {
context.Firing = len(context.EvalMatches) > 0 context.Firing = len(context.EvalMatches) > 0
} }
func (c *QueryCondition) executeQuery(context *alerting.EvalContext, timeRange tsdb.TimeRange) (tsdb.TimeSeriesSlice, error) { func (c *QueryCondition) executeQuery(context *alerting.EvalContext, timeRange *tsdb.TimeRange) (tsdb.TimeSeriesSlice, error) {
getDsInfo := &m.GetDataSourceByIdQuery{ getDsInfo := &m.GetDataSourceByIdQuery{
Id: c.Query.DatasourceId, Id: c.Query.DatasourceId,
OrgId: context.Rule.OrgId, OrgId: context.Rule.OrgId,
@ -105,9 +105,9 @@ func (c *QueryCondition) executeQuery(context *alerting.EvalContext, timeRange t
return result, nil return result, nil
} }
func (c *QueryCondition) getRequestForAlertRule(datasource *m.DataSource, timerange tsdb.TimeRange) *tsdb.Request { func (c *QueryCondition) getRequestForAlertRule(datasource *m.DataSource, timeRange *tsdb.TimeRange) *tsdb.Request {
req := &tsdb.Request{ req := &tsdb.Request{
TimeRange: timerange, TimeRange: timeRange,
Queries: []*tsdb.Query{ Queries: []*tsdb.Query{
{ {
RefId: "A", RefId: "A",

View File

@ -4,7 +4,6 @@ import "github.com/grafana/grafana/pkg/components/simplejson"
type Query struct { type Query struct {
RefId string RefId string
Query string
Model *simplejson.Json Model *simplejson.Json
Depends []string Depends []string
DataSource *DataSourceInfo DataSource *DataSourceInfo
@ -17,13 +16,13 @@ type Query struct {
type QuerySlice []*Query type QuerySlice []*Query
type Request struct { type Request struct {
TimeRange TimeRange TimeRange *TimeRange
Queries QuerySlice Queries QuerySlice
} }
type Response struct { type Response struct {
BatchTimings []*BatchTiming BatchTimings []*BatchTiming `json:"timings"`
Results map[string]*QueryResult Results map[string]*QueryResult `json:"results"`
} }
type DataSourceInfo struct { type DataSourceInfo struct {
@ -50,14 +49,14 @@ type BatchResult struct {
} }
type QueryResult struct { type QueryResult struct {
Error error Error error `json:"error"`
RefId string RefId string `json:"refId"`
Series TimeSeriesSlice Series TimeSeriesSlice `json:"series"`
} }
type TimeSeries struct { type TimeSeries struct {
Name string `json:"target"` Name string `json:"name"`
Points [][2]*float64 `json:"datapoints"` Points [][2]*float64 `json:"points"`
} }
type TimeSeriesSlice []*TimeSeries type TimeSeriesSlice []*TimeSeries

View File

@ -10,8 +10,8 @@ import (
"github.com/grafana/grafana/pkg/log" "github.com/grafana/grafana/pkg/log"
"github.com/grafana/grafana/pkg/tsdb" "github.com/grafana/grafana/pkg/tsdb"
"github.com/prometheus/client_golang/api/prometheus" "github.com/prometheus/client_golang/api/prometheus"
"golang.org/x/net/context"
pmodel "github.com/prometheus/common/model" pmodel "github.com/prometheus/common/model"
"golang.org/x/net/context"
) )
type PrometheusExecutor struct { type PrometheusExecutor struct {
@ -111,12 +111,12 @@ func parseQuery(queries tsdb.QuerySlice, queryContext *tsdb.QueryContext) (*Prom
return nil, err return nil, err
} }
start, err := queryContext.TimeRange.FromTime() start, err := queryContext.TimeRange.ParseFrom()
if err != nil { if err != nil {
return nil, err return nil, err
} }
end, err := queryContext.TimeRange.ToTime() end, err := queryContext.TimeRange.ParseTo()
if err != nil { if err != nil {
return nil, err return nil, err
} }

View File

@ -3,7 +3,7 @@ package tsdb
import "sync" import "sync"
type QueryContext struct { type QueryContext struct {
TimeRange TimeRange TimeRange *TimeRange
Queries QuerySlice Queries QuerySlice
Results map[string]*QueryResult Results map[string]*QueryResult
ResultsChan chan *BatchResult ResultsChan chan *BatchResult
@ -11,7 +11,7 @@ type QueryContext struct {
BatchWaits sync.WaitGroup BatchWaits sync.WaitGroup
} }
func NewQueryContext(queries QuerySlice, timeRange TimeRange) *QueryContext { func NewQueryContext(queries QuerySlice, timeRange *TimeRange) *QueryContext {
return &QueryContext{ return &QueryContext{
TimeRange: timeRange, TimeRange: timeRange,
Queries: queries, Queries: queries,

98
pkg/tsdb/testdata/scenarios.go vendored Normal file
View File

@ -0,0 +1,98 @@
package testdata
import (
"math/rand"
"time"
"github.com/grafana/grafana/pkg/log"
"github.com/grafana/grafana/pkg/tsdb"
)
// ScenarioHandler produces the query result for one test-data scenario,
// given the query (model, RefId, interval, max data points) and the
// request context (time range).
type ScenarioHandler func(query *tsdb.Query, context *tsdb.QueryContext) *tsdb.QueryResult

// Scenario describes one selectable test-data scenario. Id, Name and
// Description are exposed as JSON (see the scenarios listing endpoint);
// Handler is internal and excluded from serialization via `json:"-"`.
type Scenario struct {
	Id          string          `json:"id"`
	Name        string          `json:"name"`
	Description string          `json:"description"`
	Handler     ScenarioHandler `json:"-"`
}

// ScenarioRegistry maps scenario id to its definition; populated in init().
var ScenarioRegistry map[string]*Scenario
// init populates ScenarioRegistry with the built-in test-data scenarios.
// Each scenario maps an id (referenced by the frontend query editor via
// the "scenarioId" model property) to a handler that synthesizes a time
// series for the requested time range.
func init() {
	ScenarioRegistry = make(map[string]*Scenario)
	logger := log.New("tsdb.testdata")

	registerScenario(&Scenario{
		Id:   "random_walk",
		Name: "Random Walk",

		Handler: func(query *tsdb.Query, context *tsdb.QueryContext) *tsdb.QueryResult {
			// Work in millisecond epochs. Unix()*1000 would truncate the
			// sub-second component of ranges supplied as ms timestamps
			// (tryParseUnixMsEpoch keeps ms precision), skewing the walk.
			timeWalkerMs := context.TimeRange.MustGetFrom().UnixNano() / int64(time.Millisecond)
			to := context.TimeRange.MustGetTo().UnixNano() / int64(time.Millisecond)

			// Guard against a zero/negative interval, which would emit
			// up to 10000 points all carrying the same timestamp.
			intervalMs := query.IntervalMs
			if intervalMs <= 0 {
				intervalMs = 1000
			}

			series := newSeriesForQuery(query)

			points := make([][2]*float64, 0)
			walker := rand.Float64() * 100

			// Hard cap of 10000 points so a huge range cannot blow up memory.
			for i := int64(0); i < 10000 && timeWalkerMs < to; i++ {
				timestamp := float64(timeWalkerMs)
				val := walker
				points = append(points, [2]*float64{&val, &timestamp})

				walker += rand.Float64() - 0.5
				timeWalkerMs += intervalMs
			}

			series.Points = points

			queryRes := &tsdb.QueryResult{}
			queryRes.Series = append(queryRes.Series, series)
			return queryRes
		},
	})

	registerScenario(&Scenario{
		Id:   "no_data_points",
		Name: "No Data Points",
		Handler: func(query *tsdb.Query, context *tsdb.QueryContext) *tsdb.QueryResult {
			// Empty (non-nil) slice so JSON encodes as [] rather than null.
			return &tsdb.QueryResult{
				Series: make(tsdb.TimeSeriesSlice, 0),
			}
		},
	})

	registerScenario(&Scenario{
		Id:   "datapoints_outside_range",
		Name: "Datapoints Outside Range",
		Handler: func(query *tsdb.Query, context *tsdb.QueryContext) *tsdb.QueryResult {
			queryRes := &tsdb.QueryResult{}
			series := newSeriesForQuery(query)

			// A single point one hour before the requested range, to test
			// how panels handle data entirely outside the visible window.
			outsideTime := context.TimeRange.MustGetFrom().Add(-1*time.Hour).UnixNano() / int64(time.Millisecond)
			timestamp := float64(outsideTime)
			// NOTE(review): leftover debug logging — consider removing
			// (and dropping the log import along with it).
			logger.Info("time", "from", timestamp)
			val := float64(10)

			series.Points = append(series.Points, [2]*float64{&val, &timestamp})
			queryRes.Series = append(queryRes.Series, series)
			return queryRes
		},
	})
}
// registerScenario adds the given scenario to the global registry, keyed
// by its id. A later registration with the same id silently overwrites an
// earlier one.
func registerScenario(scenario *Scenario) {
	ScenarioRegistry[scenario.Id] = scenario
}
// newSeriesForQuery builds an empty TimeSeries named after the query's
// "alias" model property, falling back to "<RefId>-series" when no alias
// is set.
func newSeriesForQuery(query *tsdb.Query) *tsdb.TimeSeries {
	name := query.Model.Get("alias").MustString("")
	if len(name) == 0 {
		name = query.RefId + "-series"
	}

	return &tsdb.TimeSeries{Name: name}
}

View File

@ -1,17 +1,20 @@
package testdata package testdata
import ( import (
"math/rand" "github.com/grafana/grafana/pkg/log"
"github.com/grafana/grafana/pkg/tsdb" "github.com/grafana/grafana/pkg/tsdb"
) )
type TestDataExecutor struct { type TestDataExecutor struct {
*tsdb.DataSourceInfo *tsdb.DataSourceInfo
log log.Logger
} }
func NewTestDataExecutor(dsInfo *tsdb.DataSourceInfo) tsdb.Executor { func NewTestDataExecutor(dsInfo *tsdb.DataSourceInfo) tsdb.Executor {
return &TestDataExecutor{dsInfo} return &TestDataExecutor{
DataSourceInfo: dsInfo,
log: log.New("tsdb.testdata"),
}
} }
func init() { func init() {
@ -22,33 +25,15 @@ func (e *TestDataExecutor) Execute(queries tsdb.QuerySlice, context *tsdb.QueryC
result := &tsdb.BatchResult{} result := &tsdb.BatchResult{}
result.QueryResults = make(map[string]*tsdb.QueryResult) result.QueryResults = make(map[string]*tsdb.QueryResult)
from, _ := context.TimeRange.FromTime()
to, _ := context.TimeRange.ToTime()
queryRes := &tsdb.QueryResult{}
for _, query := range queries { for _, query := range queries {
// scenario := query.Model.Get("scenario").MustString("random_walk") scenarioId := query.Model.Get("scenarioId").MustString("random_walk")
series := &tsdb.TimeSeries{Name: "test-series-0"} if scenario, exist := ScenarioRegistry[scenarioId]; exist {
result.QueryResults[query.RefId] = scenario.Handler(query, context)
stepInSeconds := (to.Unix() - from.Unix()) / query.MaxDataPoints result.QueryResults[query.RefId].RefId = query.RefId
points := make([][2]*float64, 0) } else {
walker := rand.Float64() * 100 e.log.Error("Scenario not found", "scenarioId", scenarioId)
time := from.Unix()
for i := int64(0); i < query.MaxDataPoints; i++ {
timestamp := float64(time)
val := float64(walker)
points = append(points, [2]*float64{&val, &timestamp})
walker += rand.Float64() - 0.5
time += stepInSeconds
} }
series.Points = points
queryRes.Series = append(queryRes.Series, series)
} }
result.QueryResults["A"] = queryRes
return result return result
} }

View File

@ -7,8 +7,8 @@ import (
"time" "time"
) )
func NewTimeRange(from, to string) TimeRange { func NewTimeRange(from, to string) *TimeRange {
return TimeRange{ return &TimeRange{
From: from, From: from,
To: to, To: to,
Now: time.Now(), Now: time.Now(),
@ -21,13 +21,37 @@ type TimeRange struct {
Now time.Time Now time.Time
} }
func (tr TimeRange) FromTime() (time.Time, error) { func (tr *TimeRange) MustGetFrom() time.Time {
if val, err := strconv.ParseInt(tr.From, 10, 64); err == nil { if res, err := tr.ParseFrom(); err != nil {
return time.Unix(val, 0), nil return time.Unix(0, 0)
} else {
return res
}
}
func (tr *TimeRange) MustGetTo() time.Time {
if res, err := tr.ParseTo(); err != nil {
return time.Unix(0, 0)
} else {
return res
}
}
func tryParseUnixMsEpoch(val string) (time.Time, bool) {
if val, err := strconv.ParseInt(val, 10, 64); err == nil {
seconds := val / 1000
nano := (val - seconds*1000) * 1000000
return time.Unix(seconds, nano), true
}
return time.Time{}, false
}
func (tr *TimeRange) ParseFrom() (time.Time, error) {
if res, ok := tryParseUnixMsEpoch(tr.From); ok {
return res, nil
} }
fromRaw := strings.Replace(tr.From, "now-", "", 1) fromRaw := strings.Replace(tr.From, "now-", "", 1)
diff, err := time.ParseDuration("-" + fromRaw) diff, err := time.ParseDuration("-" + fromRaw)
if err != nil { if err != nil {
return time.Time{}, err return time.Time{}, err
@ -36,7 +60,7 @@ func (tr TimeRange) FromTime() (time.Time, error) {
return tr.Now.Add(diff), nil return tr.Now.Add(diff), nil
} }
func (tr TimeRange) ToTime() (time.Time, error) { func (tr *TimeRange) ParseTo() (time.Time, error) {
if tr.To == "now" { if tr.To == "now" {
return tr.Now, nil return tr.Now, nil
} else if strings.HasPrefix(tr.To, "now-") { } else if strings.HasPrefix(tr.To, "now-") {
@ -50,8 +74,8 @@ func (tr TimeRange) ToTime() (time.Time, error) {
return tr.Now.Add(diff), nil return tr.Now.Add(diff), nil
} }
if val, err := strconv.ParseInt(tr.To, 10, 64); err == nil { if res, ok := tryParseUnixMsEpoch(tr.To); ok {
return time.Unix(val, 0), nil return res, nil
} }
return time.Time{}, fmt.Errorf("cannot parse to value %s", tr.To) return time.Time{}, fmt.Errorf("cannot parse to value %s", tr.To)

View File

@ -23,13 +23,13 @@ func TestTimeRange(t *testing.T) {
fiveMinAgo, _ := time.ParseDuration("-5m") fiveMinAgo, _ := time.ParseDuration("-5m")
expected := now.Add(fiveMinAgo) expected := now.Add(fiveMinAgo)
res, err := tr.FromTime() res, err := tr.ParseFrom()
So(err, ShouldBeNil) So(err, ShouldBeNil)
So(res.Unix(), ShouldEqual, expected.Unix()) So(res.Unix(), ShouldEqual, expected.Unix())
}) })
Convey("now ", func() { Convey("now ", func() {
res, err := tr.ToTime() res, err := tr.ParseTo()
So(err, ShouldBeNil) So(err, ShouldBeNil)
So(res.Unix(), ShouldEqual, now.Unix()) So(res.Unix(), ShouldEqual, now.Unix())
}) })
@ -46,7 +46,7 @@ func TestTimeRange(t *testing.T) {
fiveHourAgo, _ := time.ParseDuration("-5h") fiveHourAgo, _ := time.ParseDuration("-5h")
expected := now.Add(fiveHourAgo) expected := now.Add(fiveHourAgo)
res, err := tr.FromTime() res, err := tr.ParseFrom()
So(err, ShouldBeNil) So(err, ShouldBeNil)
So(res.Unix(), ShouldEqual, expected.Unix()) So(res.Unix(), ShouldEqual, expected.Unix())
}) })
@ -54,7 +54,7 @@ func TestTimeRange(t *testing.T) {
Convey("now-10m ", func() { Convey("now-10m ", func() {
fiveMinAgo, _ := time.ParseDuration("-10m") fiveMinAgo, _ := time.ParseDuration("-10m")
expected := now.Add(fiveMinAgo) expected := now.Add(fiveMinAgo)
res, err := tr.ToTime() res, err := tr.ParseTo()
So(err, ShouldBeNil) So(err, ShouldBeNil)
So(res.Unix(), ShouldEqual, expected.Unix()) So(res.Unix(), ShouldEqual, expected.Unix())
}) })
@ -68,13 +68,13 @@ func TestTimeRange(t *testing.T) {
Now: now, Now: now,
} }
res, err := tr.FromTime() res, err := tr.ParseFrom()
So(err, ShouldBeNil) So(err, ShouldBeNil)
So(res.Unix(), ShouldEqual, 1474973725473) So(res.UnixNano()/int64(time.Millisecond), ShouldEqual, 1474973725473)
res, err = tr.ToTime() res, err = tr.ParseTo()
So(err, ShouldBeNil) So(err, ShouldBeNil)
So(res.Unix(), ShouldEqual, 1474975757930) So(res.UnixNano()/int64(time.Millisecond), ShouldEqual, 1474975757930)
}) })
Convey("Cannot parse asdf", func() { Convey("Cannot parse asdf", func() {
@ -85,10 +85,10 @@ func TestTimeRange(t *testing.T) {
Now: now, Now: now,
} }
_, err = tr.FromTime() _, err = tr.ParseFrom()
So(err, ShouldNotBeNil) So(err, ShouldNotBeNil)
_, err = tr.ToTime() _, err = tr.ParseTo()
So(err, ShouldNotBeNil) So(err, ShouldNotBeNil)
}) })
}) })

View File

@ -14,9 +14,9 @@ func TestMetricQuery(t *testing.T) {
Convey("Given 3 queries for 2 data sources", func() { Convey("Given 3 queries for 2 data sources", func() {
request := &Request{ request := &Request{
Queries: QuerySlice{ Queries: QuerySlice{
{RefId: "A", Query: "asd", DataSource: &DataSourceInfo{Id: 1}}, {RefId: "A", DataSource: &DataSourceInfo{Id: 1}},
{RefId: "B", Query: "asd", DataSource: &DataSourceInfo{Id: 1}}, {RefId: "B", DataSource: &DataSourceInfo{Id: 1}},
{RefId: "C", Query: "asd", DataSource: &DataSourceInfo{Id: 2}}, {RefId: "C", DataSource: &DataSourceInfo{Id: 2}},
}, },
} }
@ -31,9 +31,9 @@ func TestMetricQuery(t *testing.T) {
Convey("Given query 2 depends on query 1", func() { Convey("Given query 2 depends on query 1", func() {
request := &Request{ request := &Request{
Queries: QuerySlice{ Queries: QuerySlice{
{RefId: "A", Query: "asd", DataSource: &DataSourceInfo{Id: 1}}, {RefId: "A", DataSource: &DataSourceInfo{Id: 1}},
{RefId: "B", Query: "asd", DataSource: &DataSourceInfo{Id: 2}}, {RefId: "B", DataSource: &DataSourceInfo{Id: 2}},
{RefId: "C", Query: "#A / #B", DataSource: &DataSourceInfo{Id: 3}, Depends: []string{"A", "B"}}, {RefId: "C", DataSource: &DataSourceInfo{Id: 3}, Depends: []string{"A", "B"}},
}, },
} }
@ -55,7 +55,7 @@ func TestMetricQuery(t *testing.T) {
Convey("When executing request with one query", t, func() { Convey("When executing request with one query", t, func() {
req := &Request{ req := &Request{
Queries: QuerySlice{ Queries: QuerySlice{
{RefId: "A", Query: "asd", DataSource: &DataSourceInfo{Id: 1, PluginId: "test"}}, {RefId: "A", DataSource: &DataSourceInfo{Id: 1, PluginId: "test"}},
}, },
} }
@ -74,8 +74,8 @@ func TestMetricQuery(t *testing.T) {
Convey("When executing one request with two queries from same data source", t, func() { Convey("When executing one request with two queries from same data source", t, func() {
req := &Request{ req := &Request{
Queries: QuerySlice{ Queries: QuerySlice{
{RefId: "A", Query: "asd", DataSource: &DataSourceInfo{Id: 1, PluginId: "test"}}, {RefId: "A", DataSource: &DataSourceInfo{Id: 1, PluginId: "test"}},
{RefId: "B", Query: "asd", DataSource: &DataSourceInfo{Id: 1, PluginId: "test"}}, {RefId: "B", DataSource: &DataSourceInfo{Id: 1, PluginId: "test"}},
}, },
} }
@ -100,9 +100,9 @@ func TestMetricQuery(t *testing.T) {
Convey("When executing one request with three queries from different datasources", t, func() { Convey("When executing one request with three queries from different datasources", t, func() {
req := &Request{ req := &Request{
Queries: QuerySlice{ Queries: QuerySlice{
{RefId: "A", Query: "asd", DataSource: &DataSourceInfo{Id: 1, PluginId: "test"}}, {RefId: "A", DataSource: &DataSourceInfo{Id: 1, PluginId: "test"}},
{RefId: "B", Query: "asd", DataSource: &DataSourceInfo{Id: 1, PluginId: "test"}}, {RefId: "B", DataSource: &DataSourceInfo{Id: 1, PluginId: "test"}},
{RefId: "C", Query: "asd", DataSource: &DataSourceInfo{Id: 2, PluginId: "test"}}, {RefId: "C", DataSource: &DataSourceInfo{Id: 2, PluginId: "test"}},
}, },
} }
@ -117,7 +117,7 @@ func TestMetricQuery(t *testing.T) {
Convey("When query uses data source of unknown type", t, func() { Convey("When query uses data source of unknown type", t, func() {
req := &Request{ req := &Request{
Queries: QuerySlice{ Queries: QuerySlice{
{RefId: "A", Query: "asd", DataSource: &DataSourceInfo{Id: 1, PluginId: "asdasdas"}}, {RefId: "A", DataSource: &DataSourceInfo{Id: 1, PluginId: "asdasdas"}},
}, },
} }
@ -129,10 +129,10 @@ func TestMetricQuery(t *testing.T) {
req := &Request{ req := &Request{
Queries: QuerySlice{ Queries: QuerySlice{
{ {
RefId: "A", Query: "asd", DataSource: &DataSourceInfo{Id: 1, PluginId: "test"}, RefId: "A", DataSource: &DataSourceInfo{Id: 1, PluginId: "test"},
}, },
{ {
RefId: "B", Query: "#A / 2", DataSource: &DataSourceInfo{Id: 2, PluginId: "test"}, Depends: []string{"A"}, RefId: "B", DataSource: &DataSourceInfo{Id: 2, PluginId: "test"}, Depends: []string{"A"},
}, },
}, },
} }

View File

@ -10,18 +10,38 @@ class TestDataDatasource {
query(options) { query(options) {
var queries = _.filter(options.targets, item => { var queries = _.filter(options.targets, item => {
return item.hide !== true; return item.hide !== true;
}).map(item => {
return {
refId: item.refId,
scenarioId: item.scenarioId,
intervalMs: options.intervalMs,
maxDataPoints: options.maxDataPoints,
};
}); });
if (queries.length === 0) { if (queries.length === 0) {
return this.$q.when({data: []}); return this.$q.when({data: []});
} }
return this.backendSrv.get('/api/metrics/test', { return this.backendSrv.post('/api/tsdb/query', {
from: options.range.from.valueOf(), from: options.range.from.valueOf().toString(),
to: options.range.to.valueOf(), to: options.range.to.valueOf().toString(),
scenario: options.targets[0].scenario, queries: queries,
interval: options.intervalMs, }).then(res => {
maxDataPoints: options.maxDataPoints, var data = [];
if (res.results) {
_.forEach(res.results, queryRes => {
for (let series of queryRes.series) {
data.push({
target: series.name,
datapoints: series.points
});
}
});
}
return {data: data};
}); });
} }

View File

@ -6,19 +6,20 @@ import {QueryCtrl} from 'app/plugins/sdk';
export class TestDataQueryCtrl extends QueryCtrl { export class TestDataQueryCtrl extends QueryCtrl {
static templateUrl = 'partials/query.editor.html'; static templateUrl = 'partials/query.editor.html';
scenarioDefs: any; scenarioList: any;
/** @ngInject **/ /** @ngInject **/
constructor($scope, $injector) { constructor($scope, $injector, private backendSrv) {
super($scope, $injector); super($scope, $injector);
this.target.scenario = this.target.scenario || 'random_walk'; this.target.scenarioId = this.target.scenarioId || 'random_walk';
this.scenarioList = [];
}
this.scenarioDefs = { $onInit() {
'random_walk': {text: 'Random Walk'}, return this.backendSrv.get('/api/tsdb/testdata/scenarios').then(res => {
'no_datapoints': {text: 'No Datapoints'}, this.scenarioList = res;
'data_outside_range': {text: 'Data Outside Range'}, });
};
} }
} }

View File

@ -2,17 +2,17 @@
<div class="gf-form-inline"> <div class="gf-form-inline">
<div class="gf-form"> <div class="gf-form">
<label class="gf-form-label query-keyword">Scenario</label> <label class="gf-form-label query-keyword">Scenario</label>
<div class="gf-form-select-wrapper width-20"> <div class="gf-form-select-wrapper width-25">
<select class="gf-form-input width-20" ng-model="ctrl.target.scenario" ng-options="k as v.text for (k, v) in ctrl.scenarioDefs" ng-change="ctrl.refresh()"></select> <select class="gf-form-input width-25" ng-model="ctrl.target.scenarioId" ng-options="v.id as v.name for v in ctrl.scenarioList" ng-change="ctrl.refresh()"></select>
</div> </div>
</div> </div>
<div class="gf-form"> <div class="gf-form">
<label class="gf-form-label query-keyword">With Options</label> <label class="gf-form-label query-keyword">With Options</label>
<input type="text" class="gf-form-input" placeholder="optional" ng-model="target.param1" ng-change="ctrl.refresh()" ng-model-onblur> <input type="text" class="gf-form-input max-width-7" placeholder="optional" ng-model="target.param1" ng-change="ctrl.refresh()" ng-model-onblur>
</div> </div>
<div class="gf-form"> <div class="gf-form">
<label class="gf-form-label query-keyword">Alias</label> <label class="gf-form-label query-keyword">Alias</label>
<input type="text" class="gf-form-input" placeholder="optional" ng-model="target.alias" ng-change="ctrl.refresh()" ng-model-onblur> <input type="text" class="gf-form-input max-width-7" placeholder="optional" ng-model="target.alias" ng-change="ctrl.refresh()" ng-model-onblur>
</div> </div>
<div class="gf-form gf-form--grow"> <div class="gf-form gf-form--grow">
<div class="gf-form-label gf-form-label--grow"></div> <div class="gf-form-label gf-form-label--grow"></div>

View File

@ -78,7 +78,7 @@ export class DataProcessor {
} }
timeSeriesHandler(seriesData, index, options) { timeSeriesHandler(seriesData, index, options) {
var datapoints = seriesData.datapoints; var datapoints = seriesData.datapoints || [];
var alias = seriesData.target; var alias = seriesData.target;
var colorIndex = index % colors.length; var colorIndex = index % colors.length;