Mirror of https://github.com/grafana/grafana.git
Loki: Remove unnecessary deduplication (#29421)
* Remove unnecessary deduplication
* Remove dedup test as we are not doing dedup on all logs anymore
* Create unique ids in Loki
* Fix comment
* Fix comment
* Store prev response uids in usedUids
* Revert "Store prev response uids in usedUids" (this reverts commit 05c496e2a8150573513f2574cfef1407de96a72b)
* Add comment
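The heart of the change shows up in the result transformer hunks below: rather than deduplicating rows after the fact, every Loki line now receives an id that is unique within the batch being processed, by appending a counter whenever the hashed id repeats. A minimal sketch of that idea, assuming the standalone md5 npm package as the hashing helper and illustrative names (makeUniqueId, seen) and arguments rather than the actual Grafana helpers:

import md5 from 'md5';

// Track how many times each base id has been produced in this batch.
type SeenIds = Record<string, number>;

// Hash timestamp + labels + line; if the hash repeats, append a counter
// so every row in the batch still ends up with a distinct id.
function makeUniqueId(ts: string, labels: string, line: string, seen: SeenIds): string {
  const base = md5(`${ts}_${labels}_${line}`);
  if (base in seen) {
    seen[base] += 1;
    return `${base}_${seen[base]}`;
  }
  seen[base] = 0;
  return base;
}

// Repeated identical lines now yield id, id_1, id_2, ... instead of colliding.
const seen: SeenIds = {};
const first = makeUniqueId('1579857562021616000', '{foo="bar"}', 'msg="Duplicated"', seen);
const second = makeUniqueId('1579857562021616000', '{foo="bar"}', 'msg="Duplicated"', seen);
console.log(first, second); // '<hash>' and '<hash>_1'

The createUid function in the diff below follows the same counting scheme, threading a usedUids map through each call.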
@@ -575,100 +575,6 @@ describe('dataFrameToLogsModel', () => {
     const logsModel = dataFrameToLogsModel(series, 1, 'utc');
     expect(logsModel.rows[0].uid).toBe('0');
   });
-
-  it('given multiple series with equal ids should return expected logs model', () => {
-    const series: DataFrame[] = [
-      toDataFrame({
-        fields: [
-          {
-            name: 'ts',
-            type: FieldType.time,
-            values: ['1970-01-01T00:00:00Z'],
-          },
-          {
-            name: 'line',
-            type: FieldType.string,
-            values: ['WARN boooo 1'],
-            labels: {
-              foo: 'bar',
-              baz: '1',
-              level: 'dbug',
-            },
-          },
-          {
-            name: 'id',
-            type: FieldType.string,
-            values: ['0'],
-          },
-        ],
-      }),
-      toDataFrame({
-        fields: [
-          {
-            name: 'ts',
-            type: FieldType.time,
-            values: ['1970-01-01T00:00:01Z'],
-          },
-          {
-            name: 'line',
-            type: FieldType.string,
-            values: ['WARN boooo 2'],
-            labels: {
-              foo: 'bar',
-              baz: '2',
-              level: 'dbug',
-            },
-          },
-          {
-            name: 'id',
-            type: FieldType.string,
-            values: ['1'],
-          },
-        ],
-      }),
-      toDataFrame({
-        fields: [
-          {
-            name: 'ts',
-            type: FieldType.time,
-            values: ['1970-01-01T00:00:01Z'],
-          },
-          {
-            name: 'line',
-            type: FieldType.string,
-            values: ['WARN boooo 2'],
-            labels: {
-              foo: 'bar',
-              baz: '2',
-              level: 'dbug',
-            },
-          },
-          {
-            name: 'id',
-            type: FieldType.string,
-            values: ['1'],
-          },
-        ],
-      }),
-    ];
-    const logsModel = dataFrameToLogsModel(series, 0, 'utc');
-    expect(logsModel.hasUniqueLabels).toBeTruthy();
-    expect(logsModel.rows).toHaveLength(2);
-    expect(logsModel.rows).toMatchObject([
-      {
-        entry: 'WARN boooo 1',
-        labels: { foo: 'bar' },
-        logLevel: LogLevel.debug,
-        uniqueLabels: { baz: '1' },
-      },
-      {
-        entry: 'WARN boooo 2',
-        labels: { foo: 'bar' },
-        logLevel: LogLevel.debug,
-        uniqueLabels: { baz: '2' },
-      },
-    ]);
-  });
 });

 describe('logSeriesToLogsModel', () => {
@@ -32,7 +32,6 @@ import {
 } from '@grafana/data';
 import { getThemeColor } from 'app/core/utils/colors';

-import { deduplicateLogRowsById } from 'app/core/utils/explore';
 import { SIPrefix } from '@grafana/data/src/valueFormats/symbolFormatters';

 export const LogLevelColor = {
@@ -393,8 +392,6 @@ export function logSeriesToLogsModel(logSeries: DataFrame[]): LogsModel | undefi
     }
   }

-  const deduplicatedLogRows = deduplicateLogRowsById(rows);
-
   // Meta data to display in status
   const meta: LogsMetaItem[] = [];
   if (_.size(commonLabels) > 0) {
@@ -416,7 +413,7 @@ export function logSeriesToLogsModel(logSeries: DataFrame[]): LogsModel | undefi
   if (limits.length > 0) {
     meta.push({
       label: 'Limit',
-      value: `${limitValue} (${deduplicatedLogRows.length} returned)`,
+      value: `${limitValue} (${rows.length} returned)`,
       kind: LogsMetaKind.String,
     });
   }
@@ -464,7 +461,7 @@ export function logSeriesToLogsModel(logSeries: DataFrame[]): LogsModel | undefi
   return {
     hasUniqueLabels,
     meta,
-    rows: deduplicatedLogRows,
+    rows,
   };
 }
@@ -12,7 +12,6 @@ import {
   DefaultTimeZone,
   HistoryItem,
   IntervalValues,
-  LogRowModel,
   LogsDedupStrategy,
   LogsSortOrder,
   RawTimeRange,
@@ -486,10 +485,6 @@ export function getIntervals(range: TimeRange, lowLimit?: string, resolution?: n
   return rangeUtil.calculateInterval(range, resolution, lowLimit);
 }

-export function deduplicateLogRowsById(rows: LogRowModel[]) {
-  return _.uniqBy(rows, 'uid');
-}
-
 export const getFirstNonQueryRowSpecificError = (queryErrors?: DataQueryError[]): DataQueryError | undefined => {
   const refId = getValueWithRefId(queryErrors);
   return refId ? undefined : getFirstQueryErrorWithoutRefId(queryErrors);
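The helper removed above was a thin wrapper around lodash's uniqBy keyed on each row's uid. Because identical lines used to hash to identical ids, that call silently collapsed genuinely repeated log lines into one row. A small sketch of the behaviour being dropped, with illustrative row literals rather than full LogRowModel values:

import _ from 'lodash';

// Three rows, two of which share a uid, as identical lines did before this change.
const rows = [
  { uid: 'abc', entry: 'msg="Duplicated"' },
  { uid: 'abc', entry: 'msg="Duplicated"' },
  { uid: 'def', entry: 'msg="Non-duplicated"' },
];

// uniqBy keeps only the first row per uid, so one of the repeated lines is dropped.
const deduped = _.uniqBy(rows, 'uid');
console.log(deduped.length); // 2, not 3

With per-batch unique ids ('abc', 'abc_1', 'def') there is nothing left to collapse, so the wrapper and its call sites can go away.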
@@ -57,8 +57,8 @@ import { FILTER_FOR_OPERATOR, FILTER_OUT_OPERATOR, FilterItem } from '@grafana/u

 const getStyles = stylesFactory((theme: GrafanaTheme) => {
   return {
-    logsMain: css`
-      label: logsMain;
+    exploreMain: css`
+      label: exploreMain;
       // Is needed for some transition animations to work.
       position: relative;
       margin-top: 21px;
@@ -347,7 +347,7 @@ export class Explore extends React.PureComponent<ExploreProps, ExploreState> {
     }

     return (
-      <main className={cx(styles.logsMain)} style={{ width }}>
+      <main className={cx(styles.exploreMain)} style={{ width }}>
        <ErrorBoundaryAlert>
          {showStartPage && StartPage && (
            <div className={'grafana-info-box grafana-info-box--max-lg'}>
@@ -14,7 +14,7 @@ const getStyles = (theme: GrafanaTheme) => ({
     font-size: ${theme.typography.size.sm};
     display: flex;
     flex-flow: column nowrap;
-    height: 65vh;
+    height: 60vh;
     overflow-y: auto;
     :first-child {
       margin-top: auto !important;
@@ -71,6 +71,37 @@ describe('loki result transformer', () => {
     expect(data[1].fields[1].values.get(0)).toEqual(streamResult[1].values[0][1]);
     expect(data[1].fields[2].values.get(0)).toEqual('75d73d66cff40f9d1a1f2d5a0bf295d0');
   });
+
+  it('should always generate unique ids for logs', () => {
+    const streamResultWithDuplicateLogs: LokiStreamResult[] = [
+      {
+        stream: {
+          foo: 'bar',
+        },
+
+        values: [
+          ['1579857562021616000', 't=2020-02-12T15:04:51+0000 lvl=info msg="Duplicated"'],
+          ['1579857562021616000', 't=2020-02-12T15:04:51+0000 lvl=info msg="Duplicated"'],
+          ['1579857562021616000', 't=2020-02-12T15:04:51+0000 lvl=info msg="Non-duplicated"'],
+          ['1579857562021616000', 't=2020-02-12T15:04:51+0000 lvl=info msg="Duplicated"'],
+        ],
+      },
+      {
+        stream: {
+          bar: 'foo',
+        },
+        values: [['1579857562021617000', 't=2020-02-12T15:04:51+0000 lvl=info msg="Non-dupliicated"']],
+      },
+    ];
+
+    const data = streamResultWithDuplicateLogs.map(stream => ResultTransformer.lokiStreamResultToDataFrame(stream));
+
+    expect(data[0].fields[2].values.get(0)).toEqual('65cee200875f58ee1430d8bd2e8b74e7');
+    expect(data[0].fields[2].values.get(1)).toEqual('65cee200875f58ee1430d8bd2e8b74e7_1');
+    expect(data[0].fields[2].values.get(2)).not.toEqual('65cee200875f58ee1430d8bd2e8b74e7_2');
+    expect(data[0].fields[2].values.get(3)).toEqual('65cee200875f58ee1430d8bd2e8b74e7_2');
+    expect(data[1].fields[2].values.get(0)).not.toEqual('65cee200875f58ee1430d8bd2e8b74e7_3');
+  });
 });

 describe('lokiStreamsToDataFrames', () => {
@@ -131,7 +162,44 @@ describe('loki result transformer', () => {
       id: '19e8e093d70122b3b53cb6e24efd6e2d',
     });
   });
+
+  it('should always generate unique ids for logs', () => {
+    const tailResponse: LokiTailResponse = {
+      streams: [
+        {
+          stream: {
+            filename: '/var/log/grafana/grafana.log',
+            job: 'grafana',
+          },
+          values: [
+            ['1581519914265798400', 't=2020-02-12T15:04:51+0000 lvl=info msg="Dupplicated 1"'],
+            ['1581519914265798400', 't=2020-02-12T15:04:51+0000 lvl=info msg="Dupplicated 1"'],
+            ['1581519914265798400', 't=2020-02-12T15:04:51+0000 lvl=info msg="Dupplicated 2"'],
+            ['1581519914265798400', 't=2020-02-12T15:04:51+0000 lvl=info msg="Not dupplicated"'],
+            ['1581519914265798400', 't=2020-02-12T15:04:51+0000 lvl=info msg="Dupplicated 1"'],
+            ['1581519914265798400', 't=2020-02-12T15:04:51+0000 lvl=info msg="Dupplicated 2"'],
+          ],
+        },
+      ],
+    };
+
+    const data = new CircularDataFrame({ capacity: 6 });
+    data.addField({ name: 'ts', type: FieldType.time, config: { displayName: 'Time' } });
+    data.addField({ name: 'tsNs', type: FieldType.time, config: { displayName: 'Time ns' } });
+    data.addField({ name: 'line', type: FieldType.string }).labels = { job: 'grafana' };
+    data.addField({ name: 'labels', type: FieldType.other });
+    data.addField({ name: 'id', type: FieldType.string });
+
+    ResultTransformer.appendResponseToBufferedData(tailResponse, data);
+    expect(data.get(0).id).toEqual('870e4d105741bdfc2c67904ee480d4f3');
+    expect(data.get(1).id).toEqual('870e4d105741bdfc2c67904ee480d4f3_1');
+    expect(data.get(2).id).toEqual('707e4ec2b842f389dbb993438505856d');
+    expect(data.get(3).id).toEqual('78f044015a58fad3e257a855b167d85e');
+    expect(data.get(4).id).toEqual('870e4d105741bdfc2c67904ee480d4f3_2');
+    expect(data.get(5).id).toEqual('707e4ec2b842f389dbb993438505856d_1');
+  });
 });

 describe('createMetricLabel', () => {
   it('should create correct label based on passed variables', () => {
     const label = ResultTransformer.createMetricLabel({}, ({
@@ -53,12 +53,15 @@ export function lokiStreamResultToDataFrame(stream: LokiStreamResult, reverse?:
   const lines = new ArrayVector<string>([]);
   const uids = new ArrayVector<string>([]);

+  // We need to store and track all used uids to ensure that uids are unique
+  const usedUids: { string?: number } = {};
+
   for (const [ts, line] of stream.values) {
     // num ns epoch in string, we convert it to iso string here so it matches old format
     times.add(new Date(parseInt(ts.substr(0, ts.length - 6), 10)).toISOString());
     timesNs.add(ts);
     lines.add(line);
-    uids.add(createUid(ts, labelsString, line));
+    uids.add(createUid(ts, labelsString, line, usedUids));
   }

   return constructDataFrame(times, timesNs, lines, uids, labels, reverse, refId);
@@ -127,6 +130,10 @@ export function appendResponseToBufferedData(response: LokiTailResponse, data: M
   const labelsField = data.fields[3];
   const idField = data.fields[4];

+  // We are comparing used ids only within the received stream. This could be a problem if the same line + labels + nanosecond timestamp came in 2 separate batches.
+  // As this is very unlikely, and the result would only affect live-tailing css animation we have decided to not compare all received uids from data param as this would slow down processing.
+  const usedUids: { string?: number } = {};
+
   for (const stream of streams) {
     // Find unique labels
     const unique = findUniqueLabels(stream.stream, baseLabels);
@@ -141,13 +148,29 @@ export function appendResponseToBufferedData(response: LokiTailResponse, data: M
       tsNsField.values.add(ts);
       lineField.values.add(line);
       labelsField.values.add(unique);
-      idField.values.add(createUid(ts, allLabelsString, line));
+      idField.values.add(createUid(ts, allLabelsString, line, usedUids));
     }
   }
 }

-function createUid(ts: string, labelsString: string, line: string): string {
-  return md5(`${ts}_${labelsString}_${line}`);
+function createUid(ts: string, labelsString: string, line: string, usedUids: any): string {
+  // Generate id as hashed nanosecond timestamp, labels and line (this does not have to be unique)
+  let id = md5(`${ts}_${labelsString}_${line}`);
+
+  // Check if generated id is unique
+  // If not and we've already used it, append it's count after it
+  if (id in usedUids) {
+    // Increase the count
+    const newCount = usedUids[id] + 1;
+    usedUids[id] = newCount;
+    // Append count to generated id to make it unique
+    id = `${id}_${newCount}`;
+  } else {
+    // If id is unique and wasn't used, add it to usedUids and start count at 0
+    usedUids[id] = 0;
+  }
+  // Return unique id
+  return id;
 }

 function lokiMatrixToTimeSeries(matrixResult: LokiMatrixResult, options: TransformerOptions): TimeSeries {
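As the comment added in appendResponseToBufferedData notes, the usedUids map only lives for one tail response, so ids are guaranteed unique within a batch but the same base id can reappear across batches. A short illustration of that trade-off, using a hypothetical trackUid helper that applies the same counting logic as createUid to an already-hashed id:

type UsedUids = { [uid: string]: number };

// Same counting logic as createUid, applied to an already-hashed id.
function trackUid(id: string, usedUids: UsedUids): string {
  if (id in usedUids) {
    usedUids[id] += 1;
    return `${id}_${usedUids[id]}`;
  }
  usedUids[id] = 0;
  return id;
}

// Batch 1: a repeated hash gets a numeric suffix, so buffered rows stay distinct.
const batch1: UsedUids = {};
console.log(trackUid('870e4d105741bdfc2c67904ee480d4f3', batch1)); // 870e4d105741bdfc2c67904ee480d4f3
console.log(trackUid('870e4d105741bdfc2c67904ee480d4f3', batch1)); // 870e4d105741bdfc2c67904ee480d4f3_1

// Batch 2 starts with a fresh map, so the same line yields the same base id again.
const batch2: UsedUids = {};
console.log(trackUid('870e4d105741bdfc2c67904ee480d4f3', batch2)); // 870e4d105741bdfc2c67904ee480d4f3

Keeping the map per batch avoids scanning every uid already buffered in the CircularDataFrame, at the cost of a rare repeated id across batches that only affects the live-tail highlight animation.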