Elastic: Create legacy runner for proxy queries (#67397)
* Elastic: Rename request to legacy request and call directly
* Apply content header directly in request
* Move legacy running queries to LegacyQueryRunner
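The change is mechanical but broad: `ElasticDatasource` keeps its public surface while every proxy-era code path is forwarded to a new `LegacyQueryRunner` that owns the raw HTTP plumbing. A minimal sketch of that shape, using simplified stand-in types rather than the real Grafana classes:

```ts
// Sketch only: stand-ins for the real classes in
// public/app/plugins/datasource/elasticsearch; names and fields are simplified.
import { Observable, of, throwError } from 'rxjs';

class LegacyQueryRunner {
  constructor(private datasource: { url: string; isProxyAccess: boolean }) {}

  // All proxy-mode HTTP traffic now funnels through this one method.
  request(method: string, url: string, data?: string): Observable<unknown> {
    if (!this.datasource.isProxyAccess) {
      return throwError(() => new Error('Browser access mode is no longer available.'));
    }
    // The real runner calls getBackendSrv().fetch() here.
    return of({ method, url: `${this.datasource.url}/${url}`, data });
  }
}

class ElasticDatasourceSketch {
  url = '/api/datasources/proxy/1'; // hypothetical proxy URL
  isProxyAccess = true;
  legacyQueryRunner = new LegacyQueryRunner(this);

  // The public API is unchanged; legacy paths just delegate.
  annotationQuery(payload: string) {
    return this.legacyQueryRunner.request('POST', '_msearch', payload);
  }
}
```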
.betterer.results
@@ -3979,6 +3979,21 @@ exports[`better eslint`] = {
     [0, 0, 0, "Unexpected any. Specify a different type.", "2"],
     [0, 0, 0, "Unexpected any. Specify a different type.", "3"]
   ],
+  "public/app/plugins/datasource/elasticsearch/LegacyQueryRunner.ts:5381": [
+    [0, 0, 0, "Unexpected any. Specify a different type.", "0"],
+    [0, 0, 0, "Unexpected any. Specify a different type.", "1"],
+    [0, 0, 0, "Unexpected any. Specify a different type.", "2"],
+    [0, 0, 0, "Unexpected any. Specify a different type.", "3"],
+    [0, 0, 0, "Unexpected any. Specify a different type.", "4"],
+    [0, 0, 0, "Unexpected any. Specify a different type.", "5"],
+    [0, 0, 0, "Unexpected any. Specify a different type.", "6"],
+    [0, 0, 0, "Unexpected any. Specify a different type.", "7"],
+    [0, 0, 0, "Unexpected any. Specify a different type.", "8"],
+    [0, 0, 0, "Unexpected any. Specify a different type.", "9"],
+    [0, 0, 0, "Unexpected any. Specify a different type.", "10"],
+    [0, 0, 0, "Do not use any type assertions.", "11"],
+    [0, 0, 0, "Unexpected any. Specify a different type.", "12"]
+  ],
   "public/app/plugins/datasource/elasticsearch/QueryBuilder.test.ts:5381": [
     [0, 0, 0, "Unexpected any. Specify a different type.", "0"]
   ],
@@ -4056,22 +4071,7 @@ exports[`better eslint`] = {
     [0, 0, 0, "Unexpected any. Specify a different type.", "12"],
     [0, 0, 0, "Unexpected any. Specify a different type.", "13"],
     [0, 0, 0, "Unexpected any. Specify a different type.", "14"],
-    [0, 0, 0, "Unexpected any. Specify a different type.", "15"],
-    [0, 0, 0, "Unexpected any. Specify a different type.", "16"],
-    [0, 0, 0, "Unexpected any. Specify a different type.", "17"],
-    [0, 0, 0, "Unexpected any. Specify a different type.", "18"],
-    [0, 0, 0, "Unexpected any. Specify a different type.", "19"],
-    [0, 0, 0, "Do not use any type assertions.", "20"],
-    [0, 0, 0, "Unexpected any. Specify a different type.", "21"],
-    [0, 0, 0, "Unexpected any. Specify a different type.", "22"],
-    [0, 0, 0, "Unexpected any. Specify a different type.", "23"],
-    [0, 0, 0, "Unexpected any. Specify a different type.", "24"],
-    [0, 0, 0, "Unexpected any. Specify a different type.", "25"],
-    [0, 0, 0, "Unexpected any. Specify a different type.", "26"],
-    [0, 0, 0, "Unexpected any. Specify a different type.", "27"],
-    [0, 0, 0, "Unexpected any. Specify a different type.", "28"],
-    [0, 0, 0, "Unexpected any. Specify a different type.", "29"],
-    [0, 0, 0, "Unexpected any. Specify a different type.", "30"]
+    [0, 0, 0, "Unexpected any. Specify a different type.", "15"]
   ],
   "public/app/plugins/datasource/elasticsearch/hooks/useStatelessReducer.ts:5381": [
     [0, 0, 0, "Do not use any type assertions.", "0"]
public/app/plugins/datasource/elasticsearch/LegacyQueryRunner.ts (new file, 407 lines)
@@ -0,0 +1,407 @@
import { isNumber, isString, first as _first, cloneDeep } from 'lodash';
import { lastValueFrom, Observable, of, throwError } from 'rxjs';
import { catchError, map, tap } from 'rxjs/operators';

import {
  DataFrame,
  DataQueryRequest,
  DataQueryResponse,
  dateTime,
  Field,
  LogRowContextOptions,
  LogRowContextQueryDirection,
  LogRowModel,
  toUtc,
} from '@grafana/data';
import { BackendSrvRequest, getBackendSrv, TemplateSrv } from '@grafana/runtime';

import { ElasticResponse } from './ElasticResponse';
import { ElasticDatasource, enhanceDataFrame } from './datasource';
import { defaultBucketAgg, hasMetricOfType } from './queryDef';
import { trackAnnotationQuery, trackQuery } from './tracking';
import { ElasticsearchQuery, Logs } from './types';

export class LegacyQueryRunner {
  datasource: ElasticDatasource;
  templateSrv: TemplateSrv;

  constructor(datasource: ElasticDatasource, templateSrv: TemplateSrv) {
    this.datasource = datasource;
    this.templateSrv = templateSrv;
  }

  request(
    method: string,
    url: string,
    data?: BackendSrvRequest['data'],
    headers?: BackendSrvRequest['headers']
  ): Observable<any> {
    if (!this.datasource.isProxyAccess) {
      const error = new Error(
        'Browser access mode in the Elasticsearch datasource is no longer available. Switch to server access mode.'
      );
      return throwError(() => error);
    }

    const options: BackendSrvRequest = {
      url: this.datasource.url + '/' + url,
      method,
      data,
      headers,
    };

    if (method === 'POST') {
      options.headers = options.headers ?? {};
      options.headers['Content-Type'] = 'application/x-ndjson';
    }

    if (this.datasource.basicAuth || this.datasource.withCredentials) {
      options.withCredentials = true;
    }
    if (this.datasource.basicAuth) {
      options.headers = {
        Authorization: this.datasource.basicAuth,
      };
    }

    return getBackendSrv()
      .fetch<any>(options)
      .pipe(
        map((results) => {
          results.data.$$config = results.config;
          return results.data;
        }),
        catchError((err) => {
          if (err.data) {
            const message = err.data.error?.reason ?? err.data.message ?? 'Unknown error';

            return throwError({
              message: 'Elasticsearch error: ' + message,
              error: err.data.error,
            });
          }

          return throwError(err);
        })
      );
  }

  annotationQuery(options: any) {
    const annotation = options.annotation;
    const timeField = annotation.timeField || '@timestamp';
    const timeEndField = annotation.timeEndField || null;

    // the `target.query` is the "new" location for the query.
    // normally we would write this code as
    // try-the-new-place-then-try-the-old-place,
    // but we had the bug at
    // https://github.com/grafana/grafana/issues/61107
    // that may have stored annotations where
    // both the old and the new place are set,
    // and in that scenario the old place needs
    // to have priority.
    const queryString = annotation.query ?? annotation.target?.query;
    const tagsField = annotation.tagsField || 'tags';
    const textField = annotation.textField || null;

    const dateRanges = [];
    const rangeStart: any = {};
    rangeStart[timeField] = {
      from: options.range.from.valueOf(),
      to: options.range.to.valueOf(),
      format: 'epoch_millis',
    };
    dateRanges.push({ range: rangeStart });

    if (timeEndField) {
      const rangeEnd: any = {};
      rangeEnd[timeEndField] = {
        from: options.range.from.valueOf(),
        to: options.range.to.valueOf(),
        format: 'epoch_millis',
      };
      dateRanges.push({ range: rangeEnd });
    }

    const queryInterpolated = this.datasource.interpolateLuceneQuery(queryString);
    const query: any = {
      bool: {
        filter: [
          {
            bool: {
              should: dateRanges,
              minimum_should_match: 1,
            },
          },
        ],
      },
    };

    if (queryInterpolated) {
      query.bool.filter.push({
        query_string: {
          query: queryInterpolated,
        },
      });
    }
    const data: any = {
      query,
      size: 10000,
    };

    const header: any = {
      search_type: 'query_then_fetch',
      ignore_unavailable: true,
    };

    // @deprecated
    // Field annotation.index is deprecated and will be removed in the future
    if (annotation.index) {
      header.index = annotation.index;
    } else {
      header.index = this.datasource.indexPattern.getIndexList(options.range.from, options.range.to);
    }

    const payload = JSON.stringify(header) + '\n' + JSON.stringify(data) + '\n';

    trackAnnotationQuery(annotation);
    return lastValueFrom(
      this.request('POST', '_msearch', payload).pipe(
        map((res) => {
          const list = [];
          const hits = res.responses[0].hits.hits;

          const getFieldFromSource = (source: any, fieldName: any) => {
            if (!fieldName) {
              return;
            }

            const fieldNames = fieldName.split('.');
            let fieldValue = source;

            for (let i = 0; i < fieldNames.length; i++) {
              fieldValue = fieldValue[fieldNames[i]];
              if (!fieldValue) {
                console.log('could not find field in annotation: ', fieldName);
                return '';
              }
            }

            return fieldValue;
          };

          for (let i = 0; i < hits.length; i++) {
            const source = hits[i]._source;
            let time = getFieldFromSource(source, timeField);
            if (typeof hits[i].fields !== 'undefined') {
              const fields = hits[i].fields;
              if (isString(fields[timeField]) || isNumber(fields[timeField])) {
                time = fields[timeField];
              }
            }

            const event: {
              annotation: any;
              time: number;
              timeEnd?: number;
              text: string;
              tags: string | string[];
            } = {
              annotation: annotation,
              time: toUtc(time).valueOf(),
              text: getFieldFromSource(source, textField),
              tags: getFieldFromSource(source, tagsField),
            };

            if (timeEndField) {
              const timeEnd = getFieldFromSource(source, timeEndField);
              if (timeEnd) {
                event.timeEnd = toUtc(timeEnd).valueOf();
              }
            }

            // legacy support for title field
            if (annotation.titleField) {
              const title = getFieldFromSource(source, annotation.titleField);
              if (title) {
                event.text = title + '\n' + event.text;
              }
            }

            if (typeof event.tags === 'string') {
              event.tags = event.tags.split(',');
            }

            list.push(event);
          }
          return list;
        })
      )
    );
  }

  async logContextQuery(row: LogRowModel, options?: LogRowContextOptions): Promise<{ data: DataFrame[] }> {
    const sortField = row.dataFrame.fields.find((f) => f.name === 'sort');
    const searchAfter = sortField?.values[row.rowIndex] || [row.timeEpochMs];
    const sort = options?.direction === LogRowContextQueryDirection.Forward ? 'asc' : 'desc';

    const header =
      options?.direction === LogRowContextQueryDirection.Forward
        ? this.datasource.getQueryHeader('query_then_fetch', dateTime(row.timeEpochMs))
        : this.datasource.getQueryHeader('query_then_fetch', undefined, dateTime(row.timeEpochMs));

    const limit = options?.limit ?? 10;
    const esQuery = JSON.stringify({
      size: limit,
      query: {
        bool: {
          filter: [
            {
              range: {
                [this.datasource.timeField]: {
                  [options?.direction === LogRowContextQueryDirection.Forward ? 'gte' : 'lte']: row.timeEpochMs,
                  format: 'epoch_millis',
                },
              },
            },
          ],
        },
      },
      sort: [{ [this.datasource.timeField]: sort }, { _doc: sort }],
      search_after: searchAfter,
    });
    const payload = [header, esQuery].join('\n') + '\n';
    const url = this.datasource.getMultiSearchUrl();
    const response = await lastValueFrom(this.request('POST', url, payload));
    const targets: ElasticsearchQuery[] = [{ refId: `${row.dataFrame.refId}`, metrics: [{ type: 'logs', id: '1' }] }];
    const elasticResponse = new ElasticResponse(targets, transformHitsBasedOnDirection(response, sort));
    const logResponse = elasticResponse.getLogs(this.datasource.logMessageField, this.datasource.logLevelField);
    const dataFrame = _first(logResponse.data);
    if (!dataFrame) {
      return { data: [] };
    }
    /**
     * The LogRowContext requires there is a field in the dataFrame.fields
     * named `ts` for timestamp and `line` for the actual log line to display.
     * Unfortunately these fields are hardcoded and are required for the lines to
     * be properly displayed. This code just copies the fields based on this.timeField
     * and this.logMessageField and recreates the dataFrame so it works.
     */
    const timestampField = dataFrame.fields.find((f: Field) => f.name === this.datasource.timeField);
    const lineField = dataFrame.fields.find((f: Field) => f.name === this.datasource.logMessageField);
    if (timestampField && lineField) {
      return {
        data: [
          {
            ...dataFrame,
            fields: [...dataFrame.fields, { ...timestampField, name: 'ts' }, { ...lineField, name: 'line' }],
          },
        ],
      };
    }
    return logResponse;
  }

  query(request: DataQueryRequest<ElasticsearchQuery>): Observable<DataQueryResponse> {
    let payload = '';
    const targets = this.datasource.interpolateVariablesInQueries(cloneDeep(request.targets), request.scopedVars);
    const sentTargets: ElasticsearchQuery[] = [];
    let targetsContainsLogsQuery = targets.some((target) => hasMetricOfType(target, 'logs'));

    const logLimits: Array<number | undefined> = [];

    for (const target of targets) {
      if (target.hide) {
        continue;
      }

      let queryObj;
      if (hasMetricOfType(target, 'logs')) {
        // FIXME: All this logic here should be in the query builder.
        // When moving to the BE-only implementation we should remove this and let the BE
        // handle this.
        // TODO: defaultBucketAgg creates a date_histogram aggregation without a field, so it falls back to
        // the configured timeField. We should allow people to use a different time field here.
        target.bucketAggs = [defaultBucketAgg()];

        const log = target.metrics?.find((m) => m.type === 'logs') as Logs;
        const limit = log.settings?.limit ? parseInt(log.settings?.limit, 10) : 500;
        logLimits.push(limit);

        target.metrics = [];
        // Setting this for metrics queries that are typed as logs
        queryObj = this.datasource.queryBuilder.getLogsQuery(target, limit);
      } else {
        logLimits.push();
        if (target.alias) {
          target.alias = this.datasource.interpolateLuceneQuery(target.alias, request.scopedVars);
        }

        queryObj = this.datasource.queryBuilder.build(target);
      }

      const esQuery = JSON.stringify(queryObj);

      const searchType = 'query_then_fetch';
      const header = this.datasource.getQueryHeader(searchType, request.range.from, request.range.to);
      payload += header + '\n';

      payload += esQuery + '\n';

      sentTargets.push(target);
    }

    if (sentTargets.length === 0) {
      return of({ data: [] });
    }

    // We replace the range here for actual values. We need to replace it together with enclosing "" so that we replace
    // it as an integer not as string with digits. This is because elastic will convert the string only if the time
    // field is specified as type date (which probably should) but can also be specified as integer (millisecond epoch)
    // and then sending string will error out.
    payload = payload.replace(/"\$timeFrom"/g, request.range.from.valueOf().toString());
    payload = payload.replace(/"\$timeTo"/g, request.range.to.valueOf().toString());
    payload = this.templateSrv.replace(payload, request.scopedVars);

    const url = this.datasource.getMultiSearchUrl();

    const start = new Date();
    return this.request('POST', url, payload).pipe(
      map((res) => {
        const er = new ElasticResponse(sentTargets, res);

        // TODO: This needs to be revisited, it seems wrong to process ALL the sent queries as logs if only one of them was a log query
        if (targetsContainsLogsQuery) {
          const response = er.getLogs(this.datasource.logMessageField, this.datasource.logLevelField);

          response.data.forEach((dataFrame, index) => {
            enhanceDataFrame(dataFrame, this.datasource.dataLinks, logLimits[index]);
          });
          return response;
        }

        return er.getTimeSeries();
      }),
      tap((response) => trackQuery(response, request, start))
    );
  }
}

function transformHitsBasedOnDirection(response: any, direction: 'asc' | 'desc') {
  if (direction === 'desc') {
    return response;
  }
  const actualResponse = response.responses[0];
  return {
    ...response,
    responses: [
      {
        ...actualResponse,
        hits: {
          ...actualResponse.hits,
          hits: actualResponse.hits.hits.reverse(),
        },
      },
    ],
  };
}
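Both `annotationQuery` and `query` above build `_msearch` payloads the same way: one JSON header line plus one JSON body line per query, newline-terminated, which is why the POST branch of `request` sets the `application/x-ndjson` content type. A small illustration (the index name and `runner` variable are made-up examples):

```ts
// How the runner's _msearch payloads are shaped: header line, body line,
// trailing newline. The index name below is hypothetical.
const header = { search_type: 'query_then_fetch', ignore_unavailable: true, index: 'grafana-logs-*' };
const body = { query: { match_all: {} }, size: 10 };
const payload = JSON.stringify(header) + '\n' + JSON.stringify(body) + '\n';
// runner.request('POST', '_msearch', payload) would send both lines in one round trip.
```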
public/app/plugins/datasource/elasticsearch/datasource.test.ts
@@ -860,13 +860,14 @@ describe('ElasticDatasource', () => {
   describe('query', () => {
     it('should replace range as integer not string', async () => {
       const { ds } = getTestContext({ jsonData: { interval: 'Daily', timeField: '@time' } });
-      const postMock = jest.fn((url: string, data) => of(createFetchResponse({ responses: [] })));
-      ds['post'] = postMock;
+      const postMock = jest.fn((method: string, url: string, data, header: object) =>
+        of(createFetchResponse({ responses: [] }))
+      );
+      ds.legacyQueryRunner['request'] = postMock;

       await expect(ds.query(createElasticQuery())).toEmitValuesWith((received) => {
         expect(postMock).toHaveBeenCalledTimes(1);
-        const query = postMock.mock.calls[0][1];
+        const query = postMock.mock.calls[0][2];
         expect(typeof JSON.parse(query.split('\n')[1]).query.bool.filter[0].range['@time'].gte).toBe('number');
       });
     });
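The only behavioral change in this test is the mocked call signature: the old private `post(url, data)` put the payload at argument index 1, while `LegacyQueryRunner.request(method, url, data, headers)` puts it at index 2, hence `mock.calls[0][2]`. A distilled version of that bookkeeping (runs in a Jest test context):

```ts
// Distilled: why the assertion moved from calls[0][1] to calls[0][2].
const requestMock = jest.fn((method: string, url: string, data?: string) => undefined);
requestMock('POST', '_msearch', '{"search_type":"query_then_fetch"}\n{}\n');
const payload = requestMock.mock.calls[0][2]; // data is now the third argument
```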
public/app/plugins/datasource/elasticsearch/datasource.ts
@@ -1,5 +1,5 @@
-import { cloneDeep, find, first as _first, isNumber, isObject, isString, map as _map } from 'lodash';
-import { generate, lastValueFrom, Observable, of, throwError } from 'rxjs';
+import { cloneDeep, find, first as _first, isObject, isString, map as _map } from 'lodash';
+import { generate, lastValueFrom, Observable, of } from 'rxjs';
 import { catchError, first, map, mergeMap, skipWhile, throwIfEmpty, tap } from 'rxjs/operators';
 import { SemVer } from 'semver';

@@ -21,27 +21,25 @@ import {
   MetricFindValue,
   ScopedVars,
   TimeRange,
-  toUtc,
   QueryFixAction,
   CoreApp,
   SupplementaryQueryType,
-  SupplementaryQueryOptions,
   DataQueryError,
   rangeUtil,
-  Field,
   LogRowContextQueryDirection,
   LogRowContextOptions,
+  SupplementaryQueryOptions,
 } from '@grafana/data';
-import { BackendSrvRequest, DataSourceWithBackend, getBackendSrv, getDataSourceSrv, config } from '@grafana/runtime';
+import { DataSourceWithBackend, getDataSourceSrv, config } from '@grafana/runtime';
 import { queryLogsVolume } from 'app/core/logsModel';
 import { getTimeSrv, TimeSrv } from 'app/features/dashboard/services/TimeSrv';
 import { getTemplateSrv, TemplateSrv } from 'app/features/templating/template_srv';

 import { getLogLevelFromKey } from '../../../features/logs/utils';

-import { ElasticResponse } from './ElasticResponse';
 import { IndexPattern, intervalMap } from './IndexPattern';
 import LanguageProvider from './LanguageProvider';
+import { LegacyQueryRunner } from './LegacyQueryRunner';
 import { ElasticQueryBuilder } from './QueryBuilder';
 import { ElasticsearchAnnotationsQueryEditor } from './components/QueryEditor/AnnotationQueryEditor';
 import { isBucketAggregationWithField } from './components/QueryEditor/BucketAggregationsEditor/aggregations';
@@ -51,8 +49,7 @@ import {
   isPipelineAggregationWithMultipleBucketPaths,
 } from './components/QueryEditor/MetricAggregationsEditor/aggregations';
 import { metricAggregationConfig } from './components/QueryEditor/MetricAggregationsEditor/utils';
-import { defaultBucketAgg, hasMetricOfType } from './queryDef';
-import { trackAnnotationQuery, trackQuery } from './tracking';
+import { trackQuery } from './tracking';
 import {
   Logs,
   BucketAggregation,
@@ -106,6 +103,7 @@ export class ElasticDatasource
   isProxyAccess: boolean;
   timeSrv: TimeSrv;
   databaseVersion: SemVer | null;
+  legacyQueryRunner: LegacyQueryRunner;

   constructor(
     instanceSettings: DataSourceInstanceSettings<ElasticsearchOptions>,
@@ -147,57 +145,7 @@ export class ElasticDatasource
     }
     this.languageProvider = new LanguageProvider(this);
     this.timeSrv = getTimeSrv();
-  }
-
-  private request(
-    method: string,
-    url: string,
-    data?: undefined,
-    headers?: BackendSrvRequest['headers']
-  ): Observable<any> {
-    if (!this.isProxyAccess) {
-      const error = new Error(
-        'Browser access mode in the Elasticsearch datasource is no longer available. Switch to server access mode.'
-      );
-      return throwError(() => error);
-    }
-
-    const options: BackendSrvRequest = {
-      url: this.url + '/' + url,
-      method,
-      data,
-      headers,
-    };
-
-    if (this.basicAuth || this.withCredentials) {
-      options.withCredentials = true;
-    }
-    if (this.basicAuth) {
-      options.headers = {
-        Authorization: this.basicAuth,
-      };
-    }
-
-    return getBackendSrv()
-      .fetch<any>(options)
-      .pipe(
-        map((results) => {
-          results.data.$$config = results.config;
-          return results.data;
-        }),
-        catchError((err) => {
-          if (err.data) {
-            const message = err.data.error?.reason ?? err.data.message ?? 'Unknown error';
-
-            return throwError({
-              message: 'Elasticsearch error: ' + message,
-              error: err.data.error,
-            });
-          }
-
-          return throwError(err);
-        })
-      );
+    this.legacyQueryRunner = new LegacyQueryRunner(this, this.templateSrv);
   }

   async importFromAbstractQueries(abstractQueries: AbstractQuery[]): Promise<ElasticsearchQuery[]> {
@@ -212,7 +160,8 @@ export class ElasticDatasource
    *
    * @param url the url to query the index on, for example `/_mapping`.
    */
-  private get(url: string, range = getDefaultTimeRange()): Observable<any> {
+  private requestAllIndices(url: string, range = getDefaultTimeRange()): Observable<any> {
     let indexList = this.indexPattern.getIndexList(range.from, range.to);
     if (!Array.isArray(indexList)) {
       indexList = [this.indexPattern.getIndexForToday()];
@@ -220,12 +169,8 @@ export class ElasticDatasource

     const indexUrlList = indexList.map((index) => index + url);

-    return this.requestAllIndices(indexUrlList);
-  }
-
-  private requestAllIndices(indexList: string[]): Observable<any> {
     const maxTraversals = 7; // do not go beyond one week (for a daily pattern)
-    const listLen = indexList.length;
+    const listLen = indexUrlList.length;

     return generate({
       initialState: 0,
@@ -234,7 +179,9 @@ export class ElasticDatasource
     }).pipe(
       mergeMap((index) => {
         // catch all errors and emit an object with an err property to simplify checks later in the pipeline
-        return this.request('GET', indexList[listLen - index - 1]).pipe(catchError((err) => of({ err })));
+        return this.legacyQueryRunner
+          .request('GET', indexUrlList[listLen - index - 1])
+          .pipe(catchError((err) => of({ err })));
       }),
       skipWhile((resp) => resp?.err?.status === 404), // skip all requests that fail because missing Elastic index
       throwIfEmpty(() => 'Could not find an available index for this time range.'), // when i === Math.min(listLen, maxTraversals) generate will complete but without emitting any values which means we didn't find a valid index
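The merged `requestAllIndices` keeps the old traversal strategy: starting from the most recent index URL, request up to `maxTraversals` candidates, skip the ones that 404 because that day's index does not exist, and fail only when every candidate misses. A self-contained sketch of that rxjs pipeline, with a hypothetical `fetchIndex` standing in for the backend call:

```ts
import { generate, lastValueFrom, of } from 'rxjs';
import { first, mergeMap, skipWhile, throwIfEmpty } from 'rxjs/operators';

// Hypothetical daily index URLs, oldest first (as IndexPattern.getIndexList returns them).
const indexUrlList = ['logs-2023-04-30/_mapping', 'logs-2023-05-01/_mapping'];
// Stand-in for the backend call; pretend today's index has not been created yet.
const fetchIndex = (url: string) =>
  of(url.includes('2023-05-01') ? { err: { status: 404 } } : { ok: url });

const maxTraversals = 7; // do not go beyond one week for a daily pattern
const listLen = indexUrlList.length;

const firstAvailable = generate({
  initialState: 0,
  condition: (i) => i < Math.min(listLen, maxTraversals),
  iterate: (i) => i + 1,
}).pipe(
  mergeMap((i) => fetchIndex(indexUrlList[listLen - i - 1])), // newest first
  skipWhile((resp: { err?: { status: number } }) => resp?.err?.status === 404),
  throwIfEmpty(() => 'Could not find an available index for this time range.'),
  first()
);

lastValueFrom(firstAvailable).then(console.log); // { ok: 'logs-2023-04-30/_mapping' }
```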
@@ -249,165 +196,11 @@ export class ElasticDatasource
       )
     );
   }

-  private post(url: string, data: any): Observable<any> {
-    return this.request('POST', url, data, { 'Content-Type': 'application/x-ndjson' });
-  }
-
   annotationQuery(options: any): Promise<any> {
-    const annotation = options.annotation;
-    const timeField = annotation.timeField || '@timestamp';
-    const timeEndField = annotation.timeEndField || null;
-
-    // the `target.query` is the "new" location for the query.
-    // normally we would write this code as
-    // try-the-new-place-then-try-the-old-place,
-    // but we had the bug at
-    // https://github.com/grafana/grafana/issues/61107
-    // that may have stored annotations where
-    // both the old and the new place are set,
-    // and in that scenario the old place needs
-    // to have priority.
-    const queryString = annotation.query ?? annotation.target?.query;
-    const tagsField = annotation.tagsField || 'tags';
-    const textField = annotation.textField || null;
-
-    const dateRanges = [];
-    const rangeStart: any = {};
-    rangeStart[timeField] = {
-      from: options.range.from.valueOf(),
-      to: options.range.to.valueOf(),
-      format: 'epoch_millis',
-    };
-    dateRanges.push({ range: rangeStart });
-
-    if (timeEndField) {
-      const rangeEnd: any = {};
-      rangeEnd[timeEndField] = {
-        from: options.range.from.valueOf(),
-        to: options.range.to.valueOf(),
-        format: 'epoch_millis',
-      };
-      dateRanges.push({ range: rangeEnd });
-    }
-
-    const queryInterpolated = this.interpolateLuceneQuery(queryString);
-    const query: any = {
-      bool: {
-        filter: [
-          {
-            bool: {
-              should: dateRanges,
-              minimum_should_match: 1,
-            },
-          },
-        ],
-      },
-    };
-
-    if (queryInterpolated) {
-      query.bool.filter.push({
-        query_string: {
-          query: queryInterpolated,
-        },
-      });
-    }
-    const data: any = {
-      query,
-      size: 10000,
-    };
-
-    const header: any = {
-      search_type: 'query_then_fetch',
-      ignore_unavailable: true,
-    };
-
-    // @deprecated
-    // Field annotation.index is deprecated and will be removed in the future
-    if (annotation.index) {
-      header.index = annotation.index;
-    } else {
-      header.index = this.indexPattern.getIndexList(options.range.from, options.range.to);
-    }
-
-    const payload = JSON.stringify(header) + '\n' + JSON.stringify(data) + '\n';
-
-    trackAnnotationQuery(annotation);
-    return lastValueFrom(
-      this.post('_msearch', payload).pipe(
-        map((res) => {
-          const list = [];
-          const hits = res.responses[0].hits.hits;
-
-          const getFieldFromSource = (source: any, fieldName: any) => {
-            if (!fieldName) {
-              return;
-            }
-
-            const fieldNames = fieldName.split('.');
-            let fieldValue = source;
-
-            for (let i = 0; i < fieldNames.length; i++) {
-              fieldValue = fieldValue[fieldNames[i]];
-              if (!fieldValue) {
-                console.log('could not find field in annotation: ', fieldName);
-                return '';
-              }
-            }
-
-            return fieldValue;
-          };
-
-          for (let i = 0; i < hits.length; i++) {
-            const source = hits[i]._source;
-            let time = getFieldFromSource(source, timeField);
-            if (typeof hits[i].fields !== 'undefined') {
-              const fields = hits[i].fields;
-              if (isString(fields[timeField]) || isNumber(fields[timeField])) {
-                time = fields[timeField];
-              }
-            }
-
-            const event: {
-              annotation: any;
-              time: number;
-              timeEnd?: number;
-              text: string;
-              tags: string | string[];
-            } = {
-              annotation: annotation,
-              time: toUtc(time).valueOf(),
-              text: getFieldFromSource(source, textField),
-              tags: getFieldFromSource(source, tagsField),
-            };
-
-            if (timeEndField) {
-              const timeEnd = getFieldFromSource(source, timeEndField);
-              if (timeEnd) {
-                event.timeEnd = toUtc(timeEnd).valueOf();
-              }
-            }
-
-            // legacy support for title field
-            if (annotation.titleField) {
-              const title = getFieldFromSource(source, annotation.titleField);
-              if (title) {
-                event.text = title + '\n' + event.text;
-              }
-            }
-
-            if (typeof event.tags === 'string') {
-              event.tags = event.tags.split(',');
-            }
-
-            list.push(event);
-          }
-          return list;
-        })
-      )
-    );
+    return this.legacyQueryRunner.annotationQuery(options);
   }

-  private interpolateLuceneQuery(queryString: string, scopedVars?: ScopedVars) {
+  interpolateLuceneQuery(queryString: string, scopedVars?: ScopedVars) {
     return this.templateSrv.replace(queryString, scopedVars, 'lucene');
   }

@@ -529,65 +322,7 @@ export class ElasticDatasource
         )
       );
     } else {
-      const sortField = row.dataFrame.fields.find((f) => f.name === 'sort');
-      const searchAfter = sortField?.values[row.rowIndex] || [row.timeEpochMs];
-      const sort = options?.direction === LogRowContextQueryDirection.Forward ? 'asc' : 'desc';
-
-      const header =
-        options?.direction === LogRowContextQueryDirection.Forward
-          ? this.getQueryHeader('query_then_fetch', dateTime(row.timeEpochMs))
-          : this.getQueryHeader('query_then_fetch', undefined, dateTime(row.timeEpochMs));
-
-      const limit = options?.limit ?? 10;
-      const esQuery = JSON.stringify({
-        size: limit,
-        query: {
-          bool: {
-            filter: [
-              {
-                range: {
-                  [this.timeField]: {
-                    [options?.direction === LogRowContextQueryDirection.Forward ? 'gte' : 'lte']: row.timeEpochMs,
-                    format: 'epoch_millis',
-                  },
-                },
-              },
-            ],
-          },
-        },
-        sort: [{ [this.timeField]: sort }, { _doc: sort }],
-        search_after: searchAfter,
-      });
-      const payload = [header, esQuery].join('\n') + '\n';
-      const url = this.getMultiSearchUrl();
-      const response = await lastValueFrom(this.post(url, payload));
-      const targets: ElasticsearchQuery[] = [{ refId: `${row.dataFrame.refId}`, metrics: [{ type: 'logs', id: '1' }] }];
-      const elasticResponse = new ElasticResponse(targets, transformHitsBasedOnDirection(response, sort));
-      const logResponse = elasticResponse.getLogs(this.logMessageField, this.logLevelField);
-      const dataFrame = _first(logResponse.data);
-      if (!dataFrame) {
-        return { data: [] };
-      }
-      /**
-       * The LogRowContext requires there is a field in the dataFrame.fields
-       * named `ts` for timestamp and `line` for the actual log line to display.
-       * Unfortunately these fields are hardcoded and are required for the lines to
-       * be properly displayed. This code just copies the fields based on this.timeField
-       * and this.logMessageField and recreates the dataFrame so it works.
-       */
-      const timestampField = dataFrame.fields.find((f: Field) => f.name === this.timeField);
-      const lineField = dataFrame.fields.find((f: Field) => f.name === this.logMessageField);
-      if (timestampField && lineField) {
-        return {
-          data: [
-            {
-              ...dataFrame,
-              fields: [...dataFrame.fields, { ...timestampField, name: 'ts' }, { ...lineField, name: 'line' }],
-            },
-          ],
-        };
-      }
-      return logResponse;
+      return this.legacyQueryRunner.logContextQuery(row, options);
     }
   };

@@ -692,87 +427,7 @@ export class ElasticDatasource
       const start = new Date();
       return super.query(request).pipe(tap((response) => trackQuery(response, request, start)));
     }
-    let payload = '';
-    const targets = this.interpolateVariablesInQueries(cloneDeep(request.targets), request.scopedVars);
-    const sentTargets: ElasticsearchQuery[] = [];
-    let targetsContainsLogsQuery = targets.some((target) => hasMetricOfType(target, 'logs'));
-
-    const logLimits: Array<number | undefined> = [];
-
-    for (const target of targets) {
-      if (target.hide) {
-        continue;
-      }
-
-      let queryObj;
-      if (hasMetricOfType(target, 'logs')) {
-        // FIXME: All this logic here should be in the query builder.
-        // When moving to the BE-only implementation we should remove this and let the BE
-        // handle this.
-        // TODO: defaultBucketAgg creates a date_histogram aggregation without a field, so it falls back to
-        // the configured timeField. We should allow people to use a different time field here.
-        target.bucketAggs = [defaultBucketAgg()];
-
-        const log = target.metrics?.find((m) => m.type === 'logs') as Logs;
-        const limit = log.settings?.limit ? parseInt(log.settings?.limit, 10) : 500;
-        logLimits.push(limit);
-
-        target.metrics = [];
-        // Setting this for metrics queries that are typed as logs
-        queryObj = this.queryBuilder.getLogsQuery(target, limit);
-      } else {
-        logLimits.push();
-        if (target.alias) {
-          target.alias = this.interpolateLuceneQuery(target.alias, request.scopedVars);
-        }
-
-        queryObj = this.queryBuilder.build(target);
-      }
-
-      const esQuery = JSON.stringify(queryObj);
-
-      const searchType = 'query_then_fetch';
-      const header = this.getQueryHeader(searchType, request.range.from, request.range.to);
-      payload += header + '\n';
-
-      payload += esQuery + '\n';
-
-      sentTargets.push(target);
-    }
-
-    if (sentTargets.length === 0) {
-      return of({ data: [] });
-    }
-
-    // We replace the range here for actual values. We need to replace it together with enclosing "" so that we replace
-    // it as an integer not as string with digits. This is because elastic will convert the string only if the time
-    // field is specified as type date (which probably should) but can also be specified as integer (millisecond epoch)
-    // and then sending string will error out.
-    payload = payload.replace(/"\$timeFrom"/g, request.range.from.valueOf().toString());
-    payload = payload.replace(/"\$timeTo"/g, request.range.to.valueOf().toString());
-    payload = this.templateSrv.replace(payload, request.scopedVars);
-
-    const url = this.getMultiSearchUrl();
-
-    const start = new Date();
-    return this.post(url, payload).pipe(
-      map((res) => {
-        const er = new ElasticResponse(sentTargets, res);
-
-        // TODO: This needs to be revisited, it seems wrong to process ALL the sent queries as logs if only one of them was a log query
-        if (targetsContainsLogsQuery) {
-          const response = er.getLogs(this.logMessageField, this.logLevelField);
-
-          response.data.forEach((dataFrame, index) => {
-            enhanceDataFrame(dataFrame, this.dataLinks, logLimits[index]);
-          });
-          return response;
-        }
-
-        return er.getTimeSeries();
-      }),
-      tap((response) => trackQuery(response, request, start))
-    );
+    return this.legacyQueryRunner.query(request);
   }

   isMetadataField(fieldName: string) {
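The comment in the removed block (and its copy in `LegacyQueryRunner.query`) is worth unpacking: `"$timeFrom"` is replaced together with its surrounding quotes, so the interpolated value lands in the payload as a JSON number rather than a string of digits, which matters when the time field is mapped as an integer epoch instead of a date. The test above asserts exactly this. In miniature:

```ts
// Replacing the quoted token yields a JSON number, not a numeric string.
let payload = '{"range":{"@time":{"gte":"$timeFrom","lte":"$timeTo"}}}';
payload = payload.replace(/"\$timeFrom"/g, (1683000000000).toString());
payload = payload.replace(/"\$timeTo"/g, (1683086400000).toString());
console.log(typeof JSON.parse(payload).range['@time'].gte); // "number"
```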
@@ -796,7 +451,7 @@ export class ElasticDatasource
       nested: 'nested',
       histogram: 'number',
     };
-    return this.get('/_mapping', range).pipe(
+    return this.requestAllIndices('/_mapping', range).pipe(
       map((result) => {
         const shouldAddField = (obj: any, key: string) => {
           if (this.isMetadataField(key)) {
@@ -874,7 +529,7 @@ export class ElasticDatasource

     const url = this.getMultiSearchUrl();

-    return this.post(url, esQuery).pipe(
+    return this.legacyQueryRunner.request('POST', url, esQuery).pipe(
       map((res) => {
         if (!res.responses[0].aggregations) {
           return [];
@@ -1084,7 +739,7 @@ export class ElasticDatasource

   private getDatabaseVersionUncached(): Promise<SemVer | null> {
     // we want this function to never fail
-    return lastValueFrom(this.request('GET', '/')).then(
+    return lastValueFrom(this.legacyQueryRunner.request('GET', '/')).then(
       (data) => {
         const versionNumber = data?.version?.number;
         if (typeof versionNumber !== 'string') {
@@ -1212,24 +867,6 @@ function generateDataLink(linkConfig: DataLinkConfig): DataLink {
     };
   }
 }

-function transformHitsBasedOnDirection(response: any, direction: 'asc' | 'desc') {
-  if (direction === 'desc') {
-    return response;
-  }
-  const actualResponse = response.responses[0];
-  return {
-    ...response,
-    responses: [
-      {
-        ...actualResponse,
-        hits: {
-          ...actualResponse.hits,
-          hits: actualResponse.hits.hits.reverse(),
-        },
-      },
-    ],
-  };
-}
-
 function createContextTimeRange(rowTimeEpochMs: number, direction: string, intervalPattern: Interval | undefined) {
   const offset = 7;
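`transformHitsBasedOnDirection`, now private to `LegacyQueryRunner.ts`, exists because forward log-context queries sort ascending while the UI expects hits newest-first; for the `asc` case the hits array is reversed inside an otherwise-untouched response. The effect in isolation:

```ts
// Forward ('asc') responses get their hits reversed; 'desc' passes through unchanged.
const response = { responses: [{ hits: { hits: [{ _id: 'older' }, { _id: 'newer' }] } }] };
const inner = response.responses[0];
const transformed = {
  ...response,
  responses: [{ ...inner, hits: { ...inner.hits, hits: [...inner.hits.hits].reverse() } }],
};
console.log(transformed.responses[0].hits.hits.map((h) => h._id)); // ['newer', 'older']
```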