New Logs Panel: Details and JSON adjustments (#109867)

* processing: detect json logs to apply custom grammar

* LogLineDetails: improve label column width

* wip

* Remove log

* grammar: remove unused grammar

* grammar: fix log grammar

* LogLineDetails: improve margins

* LogLineDetails: further fine tune width

* Update tests

* processing: more tests
Matias Chomicki authored on 2025-08-19 19:23:18 +02:00, committed by GitHub
parent 247373ac41
commit 0113f12c7d
6 changed files with 106 additions and 22 deletions

View File

@@ -176,7 +176,7 @@ const getStyles = (theme: GrafanaTheme2, mode: LogLineDetailsMode) => ({
   inlineWrapper: css({
     gridColumn: '1 / -1',
     height: `${LOG_LINE_DETAILS_HEIGHT}vh`,
-    paddingBottom: theme.spacing(0.5),
+    padding: theme.spacing(1, 2, 1.5, 2),
     marginRight: 1,
   }),
   container: css({

View File

@@ -105,7 +105,7 @@ const getFieldsStyles = (theme: GrafanaTheme2) => ({
   fieldsTable: css({
     display: 'grid',
     gap: theme.spacing(1),
-    gridTemplateColumns: `${theme.spacing(11.5)} auto 1fr`,
+    gridTemplateColumns: `${theme.spacing(11.5)} minmax(auto, 40%) 1fr`,
   }),
   fieldsTableNoActions: css({
     display: 'grid',
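The label column keeps its fixed width while the field-name column is now capped at 40% of the table, leaving the rest to the value column. A minimal standalone sketch of the resulting grid, assuming the default Grafana spacing unit of 8px (so theme.spacing(11.5) resolves to 92px); this is an illustration, not the actual component styles:

import { css } from '@emotion/css';

const fieldsTableSketch = css({
  display: 'grid',
  gap: '8px',
  // column 1: fixed label gutter, column 2: field name, allowed to grow only up to
  // 40% of the table width, column 3: field value takes whatever space remains.
  gridTemplateColumns: '92px minmax(auto, 40%) 1fr',
});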

View File

@@ -7,6 +7,8 @@ import { generateLogGrammar } from './grammar';
 describe('generateLogGrammar', () => {
   function generateScenario(entry: string) {
     const log = createLogLine({ labels: { place: 'luna', source: 'logs' }, entry });
+    // Access body getter to trigger LogLineModel internals
+    expect(log.body).toBeDefined();
     const grammar = generateLogGrammar(log);
     const tokens = Prism.tokenize(log.entry, grammar);
     return { log, grammar, tokens };
@@ -29,7 +31,7 @@ describe('generateLogGrammar', () => {
       expect(tokens[1].type).toBe('log-token-json-key');
     }
     if (tokens[3] instanceof Token) {
-      expect(tokens[3].content).toBe('"value"');
+      expect(tokens[3].content).toEqual(['"value"']);
       expect(tokens[3].type).toBe('log-token-string');
     }
     if (tokens[5] instanceof Token) {
@@ -37,10 +39,10 @@ describe('generateLogGrammar', () => {
       expect(tokens[5].type).toBe('log-token-json-key');
     }
     if (tokens[7] instanceof Token) {
-      expect(tokens[7].content).toBe('"value2"');
+      expect(tokens[7].content).toEqual(['"value2"']);
       expect(tokens[7].type).toBe('log-token-string');
     }
-    expect.assertions(8);
+    expect.assertions(9);
   });

   test('Identifies sizes', () => {
@@ -53,7 +55,7 @@ describe('generateLogGrammar', () => {
       expect(tokens[2].content).toBe('2 KB');
       expect(tokens[2].type).toBe('log-token-size');
     }
-    expect.assertions(4);
+    expect.assertions(5);
   });

   test('Identifies durations', () => {
@@ -70,7 +72,7 @@ describe('generateLogGrammar', () => {
       expect(tokens[4].content).toBe('1h');
       expect(tokens[4].type).toBe('log-token-duration');
     }
-    expect.assertions(6);
+    expect.assertions(7);
   });

   test.each(['GET', 'POST', 'PUT', 'DELETE', 'PATCH', 'HEAD', 'OPTIONS', 'TRACE', 'CONNECT'])(
@@ -81,7 +83,7 @@ describe('generateLogGrammar', () => {
       expect(tokens[1].content).toBe(method);
       expect(tokens[1].type).toBe('log-token-method');
     }
-      expect.assertions(2);
+      expect.assertions(3);
     }
   );
 });
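The switch from toBe('"value"') to toEqual(['"value"']) follows from how Prism represents tokens whose rule carries an inside grammar: the matched text is re-tokenized and Token.content becomes an array of strings and nested tokens rather than a plain string. A minimal sketch with a hypothetical mini-grammar (not the one in grammar.ts) illustrating that behavior:

import Prism, { Grammar, Token } from 'prismjs';

// A string rule with an `inside` grammar, mirroring the shape of the new jsonGrammar.
const grammar: Grammar = {
  'log-token-string': {
    pattern: /"[^"]*"/,
    inside: {
      'log-token-size': /\b\d+\s*[kKmMgGtT]?[bB]\b/,
    },
  },
};

const tokens = Prism.tokenize('key: "value"', grammar);
const stringToken = tokens.find((t): t is Token => t instanceof Token);
// Because of `inside`, content is an array, not a string: it deep-equals ['"value"']
// (no inner token matched), hence toEqual instead of toBe in the tests above.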

View File

@@ -5,14 +5,33 @@ import { escapeRegex, parseFlags } from '@grafana/data';
 import { LogListModel } from './processing';

 // The Logs grammar is used for highlight in the logs panel
-export const logsGrammar: Grammar = {
-  'log-token-uuid': /[0-9a-fA-F]{8}-[0-9a-fA-F]{4}-[1-5][0-9a-fA-F]{3}-[89abAB][0-9a-fA-F]{3}-[0-9a-fA-F]{12}/g,
-  'log-token-json-key': /"(\b|\B)[\w-]+"(?=\s*:)/gi,
+const logsGrammar: Grammar = {
   'log-token-key': /(\b|\B)[\w_]+(?=\s*=)/gi,
+  'log-token-string': /"(?!:)([^'"])*?"(?!:)/g,
+};
+
+const tokensGrammar: Grammar = {
+  'log-token-uuid': /[0-9a-fA-F]{8}-[0-9a-fA-F]{4}-[1-5][0-9a-fA-F]{3}-[89abAB][0-9a-fA-F]{3}-[0-9a-fA-F]{12}/g,
   'log-token-size': /(?:\b|")\d+\.{0,1}\d*\s*[kKmMGgtTPp]*[bB]{1}(?:"|\b)/g,
   'log-token-duration': /(?:\b)\d+(\.\d+)?(ns|µs|ms|s|m|h|d)(?:\b)/g,
   'log-token-method': /\b(GET|POST|PUT|DELETE|PATCH|HEAD|OPTIONS|TRACE|CONNECT)\b/g,
-  'log-token-string': /"(?!:)([^'"])*?"(?!:)/g,
+};
+
+const jsonGrammar: Grammar = {
+  'log-token-json-key': {
+    pattern: /(^|[^\\])"(?:\\.|[^\\"\r\n])*"(?=\s*:)/,
+    lookbehind: true,
+    greedy: true,
+  },
+  'log-token-string': {
+    pattern: /(^|[^\\])"(?:\\.|[^\\"\r\n])*"(?!\s*:)/,
+    lookbehind: true,
+    greedy: true,
+    inside: {
+      ...tokensGrammar,
+    },
+  },
+  'log-token-size': /-?\b\d+(?:\.\d+)?(?:e[+-]?\d+)?\b/i,
 };

 export const generateLogGrammar = (log: LogListModel) => {
@@ -20,8 +39,15 @@ export const generateLogGrammar = (log: LogListModel) => {
   const logGrammar: Grammar = {
     'log-token-label': new RegExp(`\\b(${labels.join('|')})(?:[=:]{1})\\b`, 'g'),
   };
+  if (log.isJSON) {
+    return {
+      ...logGrammar,
+      ...jsonGrammar,
+    };
+  }
   return {
     ...logGrammar,
+    ...tokensGrammar,
     ...logsGrammar,
   };
 };
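Taken together, generateLogGrammar() now branches on the new isJSON flag: JSON lines get jsonGrammar, whose string rule re-tokenizes its content with tokensGrammar so sizes, durations, methods and UUIDs still highlight inside JSON values, while non-JSON lines get the previous key/string rules merged with tokensGrammar. A rough usage sketch of how the resulting grammar feeds Prism, simplified from the highlightedBody getter shown further below (the real getter also sanitizes the body and merges search-match grammars):

import Prism from 'prismjs';
import { generateLogGrammar } from './grammar';
import { LogListModel } from './processing';

function highlightBodySketch(log: LogListModel): string {
  const grammar = generateLogGrammar(log); // JSON-aware since this change
  return Prism.highlight(log.body, grammar, 'lokiql');
}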

View File

@@ -142,7 +142,7 @@ describe('preProcessLogs', () => {
     expect(logListModel.getDisplayedFieldValue(LOG_LINE_BODY_FIELD_NAME, true)).toBe('log message 1');
   });

-  test('Prettifies JSON', () => {
+  test('Does not modify unwrapped JSON', () => {
     const entry = '{"key": "value", "otherKey": "otherValue"}';
     const logListModel = createLogLine(
       { entry },
@@ -154,6 +154,21 @@ describe('preProcessLogs', () => {
       }
     );
     expect(logListModel.entry).toBe(entry);
+    expect(logListModel.body).toBe(entry);
+  });
+
+  test('Prettifies wrapped JSON', () => {
+    const entry = '{"key": "value", "otherKey": "otherValue"}';
+    const logListModel = createLogLine(
+      { entry },
+      {
+        escape: false,
+        order: LogsSortOrder.Descending,
+        timeZone: 'browser',
+        wrapLogMessage: true, // wrapped
+      }
+    );
+    expect(logListModel.entry).toBe(entry);
     expect(logListModel.body).not.toBe(entry);
   });
@@ -171,6 +186,40 @@ describe('preProcessLogs', () => {
     expect(logListModel.entry).toBe(entry);
     expect(logListModel.body).toContain('90071992547409911');
   });
+
+  test.each([
+    '{"timestamp":"2025-08-19T12:34:56Z","level":"INFO","message":"User logged in","user_id":1234}',
+    '{"time":"2025-08-19T12:35:10Z","level":"ERROR","service":"payment","error":"Insufficient funds","transaction_id":"tx-98765"}',
+    '{"ts":1692444912,"lvl":"WARN","component":"auth","msg":"Token expired","session_id":"abcd1234"}',
+    '{"@timestamp":"2025-08-19T12:36:00Z","severity":"DEBUG","event":"cache_hit","key":"user_profile:1234","duration_ms":3}',
+    '{}',
+  ])('Detects JSON logs', (entry: string) => {
+    const logListModel = createLogLine(
+      { entry },
+      {
+        escape: false,
+        order: LogsSortOrder.Descending,
+        timeZone: 'browser',
+        wrapLogMessage: false,
+      }
+    );
+    expect(logListModel.body).toBeDefined(); // Triggers parsing
+    expect(logListModel.isJSON).toBe(true);
+  });
+
+  test.each(['1', '"1"', 'true', 'null', 'false', 'not json', '"nope"'])('Detects non-JSON logs', (entry: string) => {
+    const logListModel = createLogLine(
+      { entry },
+      {
+        escape: false,
+        order: LogsSortOrder.Descending,
+        timeZone: 'browser',
+        wrapLogMessage: false,
+      }
+    );
+    expect(logListModel.body).toBeDefined(); // Triggers parsing
+    expect(logListModel.isJSON).toBe(false);
+  });
 });

 test('Orders logs', () => {

View File

@@ -1,5 +1,5 @@
 import ansicolor from 'ansicolor';
-import { parse, stringify } from 'lossless-json';
+import { LosslessNumber, parse, stringify } from 'lossless-json';
 import Prism, { Grammar } from 'prismjs';

 import {
@@ -63,6 +63,7 @@ export class LogListModel implements LogRowModel {
   private _getFieldLinks: GetFieldLinksFn | undefined = undefined;
   private _virtualization?: LogLineVirtualization;
   private _wrapLogMessage: boolean;
+  private _json = false;

   constructor(
     log: LogRowModel,
@@ -124,9 +125,13 @@ export class LogListModel implements LogRowModel {
   get body(): string {
     if (this._body === undefined) {
       try {
-        const parsed = stringify(parse(this.raw), undefined, this._wrapLogMessage ? 2 : 1);
-        if (parsed) {
-          this.raw = parsed;
+        const parsed = parse(this.raw);
+        if (typeof parsed === 'object' && parsed !== null && !(parsed instanceof LosslessNumber)) {
+          this._json = true;
+        }
+        const reStringified = this._wrapLogMessage ? stringify(parsed, undefined, 2) : this.raw;
+        if (reStringified) {
+          this.raw = reStringified;
         }
       } catch (error) {}
       const raw = config.featureToggles.otelLogsFormatting && this.otelLanguage ? getOtelFormattedBody(this) : this.raw;
@@ -153,17 +158,19 @@ export class LogListModel implements LogRowModel {
   get highlightedBody() {
     if (this._highlightedBody === undefined) {
+      // Body is accessed first to trigger the getter code before generateLogGrammar()
+      const sanitizedBody = textUtil.sanitize(this.body);
       this._grammar = this._grammar ?? generateLogGrammar(this);
       const extraGrammar = generateTextMatchGrammar(this.searchWords, this._currentSearch);
-      this._highlightedBody = Prism.highlight(
-        textUtil.sanitize(this.body),
-        { ...extraGrammar, ...this._grammar },
-        'lokiql'
-      );
+      this._highlightedBody = Prism.highlight(sanitizedBody, { ...extraGrammar, ...this._grammar }, 'lokiql');
     }
     return this._highlightedBody;
   }

+  get isJSON() {
+    return this._json;
+  }
+
   get sampledMessage(): string | undefined {
     return checkLogsSampled(this);
   }
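The detection rule relies on how lossless-json parses scalars: parse('1') returns a LosslessNumber instance (an object), while parse('"1"'), parse('true') and parse('null') return primitives or null, so only genuine objects and arrays set _json. A standalone sketch of the same check, using a hypothetical helper name isJsonLogEntry:

import { LosslessNumber, parse } from 'lossless-json';

// Hypothetical helper mirroring the check in the body getter above.
function isJsonLogEntry(raw: string): boolean {
  try {
    const parsed = parse(raw);
    // Objects and arrays count as JSON logs; LosslessNumber and null are excluded.
    return typeof parsed === 'object' && parsed !== null && !(parsed instanceof LosslessNumber);
  } catch {
    return false; // not valid JSON at all
  }
}

isJsonLogEntry('{"level":"INFO","msg":"User logged in"}'); // true
isJsonLogEntry('[1, 2, 3]'); // true
isJsonLogEntry('1'); // false: LosslessNumber is an object, hence the explicit instanceof check
isJsonLogEntry('not json'); // false: parse() throws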