Mirror of https://github.com/grafana/grafana.git, synced 2025-09-18 22:02:52 +08:00
Chore: Fix a bunch of strict null errors to fix master CI (#23443)
* Fix a bunch of null errors
* Fix failing test
* Another test fix
@@ -38,7 +38,7 @@ export class PanelPlugin<TOptions = any, TFieldConfigOptions extends object = an
   private _optionEditors?: PanelOptionEditorsRegistry;
   private registerOptionEditors?: (builder: PanelOptionsEditorBuilder<TOptions>) => void;

-  panel: ComponentType<PanelProps<TOptions>>;
+  panel: ComponentType<PanelProps<TOptions>> | null;
   editor?: ComponentClass<PanelEditorProps<TOptions>>;
   onPanelMigration?: PanelMigrationHandler<TOptions>;
   onPanelTypeChanged?: PanelTypeChangedHandler<TOptions>;
@@ -49,7 +49,7 @@ export class PanelPlugin<TOptions = any, TFieldConfigOptions extends object = an
    */
   angularPanelCtrl?: any;

-  constructor(panel: ComponentType<PanelProps<TOptions>>) {
+  constructor(panel: ComponentType<PanelProps<TOptions>> | null) {
     super();
     this.panel = panel;
   }
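For context, under strict null checks a plugin may legitimately carry no React panel component, so both the property and the constructor now accept `null` and every consumer has to handle that state. A minimal sketch of the resulting shape, using simplified stand-ins rather than the real `PanelProps`/`PanelPlugin` definitions from `@grafana/data`:

```ts
import { ComponentType } from 'react';

// Simplified stand-in for the real PanelProps from @grafana/data.
interface PanelProps<TOptions = any> {
  options: TOptions;
  width: number;
  height: number;
}

class MinimalPanelPlugin<TOptions = any> {
  // `null` is now an allowed state, so callers must check before rendering.
  panel: ComponentType<PanelProps<TOptions>> | null;

  constructor(panel: ComponentType<PanelProps<TOptions>> | null) {
    this.panel = panel;
  }

  hasReactPanel(): boolean {
    return this.panel !== null;
  }
}
```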
@@ -51,7 +51,7 @@ export function findCommonLabels(labelsSets: Labels[]): Labels {
 /**
  * Returns a map of labels that are in `labels`, but not in `commonLabels`.
  */
-export function findUniqueLabels(labels: Labels, commonLabels: Labels): Labels {
+export function findUniqueLabels(labels: Labels | undefined, commonLabels: Labels): Labels {
   const uncommonLabels: Labels = { ...labels };
   Object.keys(commonLabels).forEach(key => {
     delete uncommonLabels[key];
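Accepting `Labels | undefined` works without any extra guard because spreading `undefined` into an object literal simply produces an empty object. A small self-contained sketch of the same idea, with a local `Labels` alias standing in for the type exported by `@grafana/data`:

```ts
type Labels = Record<string, string>; // stand-in for the Labels type in @grafana/data

function findUniqueLabels(labels: Labels | undefined, commonLabels: Labels): Labels {
  // Spreading `undefined` is a no-op, so this is safe under strict null checks.
  const uncommonLabels: Labels = { ...labels };
  Object.keys(commonLabels).forEach(key => {
    delete uncommonLabels[key];
  });
  return uncommonLabels;
}

// An undefined labels argument just yields an empty result.
console.log(findUniqueLabels(undefined, { job: 'grafana' })); // {}
console.log(findUniqueLabels({ job: 'grafana', pod: 'a' }, { job: 'grafana' })); // { pod: 'a' }
```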
@@ -218,7 +218,7 @@ describe('dataFrameToLogsModel', () => {
         },
       }),
     ];
-    const logsModel = dataFrameToLogsModel(series, 0, 'utc');
+    const logsModel = dataFrameToLogsModel(series, 1, 'utc');
     expect(logsModel.hasUniqueLabels).toBeFalsy();
     expect(logsModel.rows).toHaveLength(2);
     expect(logsModel.rows).toMatchObject([
@@ -274,12 +274,12 @@ describe('dataFrameToLogsModel', () => {
         ],
       }),
     ];
-    const logsModel = dataFrameToLogsModel(series, 0, 'utc');
+    const logsModel = dataFrameToLogsModel(series, 1, 'utc');
     expect(logsModel.rows).toHaveLength(1);
     expect(logsModel.rows).toMatchObject([
       {
         entry: 'WARN boooo',
-        labels: undefined,
+        labels: {},
         logLevel: LogLevel.debug,
         uniqueLabels: {},
       },
@@ -338,7 +338,7 @@ describe('dataFrameToLogsModel', () => {
         ],
       }),
     ];
-    const logsModel = dataFrameToLogsModel(series, 0, 'utc');
+    const logsModel = dataFrameToLogsModel(series, 1, 'utc');
     expect(logsModel.hasUniqueLabels).toBeTruthy();
     expect(logsModel.rows).toHaveLength(3);
     expect(logsModel.rows).toMatchObject([
@@ -448,7 +448,7 @@ describe('dataFrameToLogsModel', () => {
         ],
       }),
     ];
-    const logsModel = dataFrameToLogsModel(series, 0, 'utc');
+    const logsModel = dataFrameToLogsModel(series, 1, 'utc');
     expect(logsModel.hasUniqueLabels).toBeTruthy();
     expect(logsModel.rows).toHaveLength(4);
     expect(logsModel.rows).toMatchObject([
@@ -497,7 +497,7 @@ describe('dataFrameToLogsModel', () => {
         ],
       }),
     ];
-    const logsModel = dataFrameToLogsModel(series, 0, 'utc');
+    const logsModel = dataFrameToLogsModel(series, 1, 'utc');
     expect(logsModel.rows[0].uid).toBe('0');
   });

@@ -192,14 +192,19 @@ function isLogsData(series: DataFrame) {
  * @param dataFrame
  * @param intervalMs In case there are no metrics series, we use this for computing it from log rows.
  */
-export function dataFrameToLogsModel(dataFrame: DataFrame[], intervalMs: number, timeZone: TimeZone): LogsModel {
+export function dataFrameToLogsModel(
+  dataFrame: DataFrame[],
+  intervalMs: number | undefined,
+  timeZone: TimeZone
+): LogsModel {
   const { logSeries, metricSeries } = separateLogsAndMetrics(dataFrame);
   const logsModel = logSeriesToLogsModel(logSeries);

   if (logsModel) {
     if (metricSeries.length === 0) {
       // Create metrics from logs
-      logsModel.series = makeSeriesForLogs(logsModel.rows, intervalMs, timeZone);
+      // If interval is not defined or 0 we cannot really compute the series
+      logsModel.series = intervalMs ? makeSeriesForLogs(logsModel.rows, intervalMs, timeZone) : [];
     } else {
       // We got metrics in the dataFrame so process those
       logsModel.series = getGraphSeriesModel(
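The guard matters because callers (for example the logs panel further down in this commit) can now pass `undefined` for the interval; the log rows still come through, but no synthetic metric series are computed. A reduced sketch of the pattern, where `makeSeriesForLogs` is a toy stand-in for the real helper and the types are simplified placeholders:

```ts
interface LogRow {
  timeEpochMs: number;
  entry: string;
}

interface GraphSeriesXY {
  label: string;
  data: Array<[number, number]>; // [bucket start, count]
}

// Toy stand-in for makeSeriesForLogs: bucket rows into fixed-size time buckets.
function makeSeriesForLogs(rows: LogRow[], intervalMs: number): GraphSeriesXY[] {
  const counts = new Map<number, number>();
  for (const row of rows) {
    const bucket = Math.floor(row.timeEpochMs / intervalMs) * intervalMs;
    counts.set(bucket, (counts.get(bucket) ?? 0) + 1);
  }
  return [{ label: 'logs', data: Array.from(counts.entries()) }];
}

function seriesFromLogs(rows: LogRow[], intervalMs: number | undefined): GraphSeriesXY[] {
  // Same guard as the change above: with no usable interval, skip the series.
  return intervalMs ? makeSeriesForLogs(rows, intervalMs) : [];
}

console.log(seriesFromLogs([{ timeEpochMs: 1000, entry: 'a' }], undefined)); // []
console.log(seriesFromLogs([{ timeEpochMs: 1000, entry: 'a' }], 1000)); // one bucketed series
```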
@@ -270,7 +275,7 @@ export function logSeriesToLogsModel(logSeries: DataFrame[]): LogsModel | undefi
     // Assume the first string field in the dataFrame is the message. This was right so far but probably needs some
     // more explicit checks.
     const stringField = fieldCache.getFirstFieldOfType(FieldType.string);
-    if (stringField.labels) {
+    if (stringField?.labels) {
       allLabels.push(stringField.labels);
     }
     return {
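`getFirstFieldOfType` can return `undefined` when a frame has no string field, so the optional chaining is what satisfies strict null checks here: the push only runs once both the field and its labels are known to exist. A compact illustration with a simplified field shape (the real lookup lives on Grafana's `FieldCache`):

```ts
type Labels = Record<string, string>;

interface Field {
  name: string;
  type: 'string' | 'number' | 'time';
  labels?: Labels;
}

// Simplified lookup; like the real getFirstFieldOfType, it may return undefined.
function getFirstFieldOfType(fields: Field[], type: Field['type']): Field | undefined {
  return fields.find(f => f.type === type);
}

const allLabels: Labels[] = [];
const fields: Field[] = [{ name: 'line', type: 'string', labels: { job: 'grafana' } }];

const stringField = getFirstFieldOfType(fields, 'string');
// Optional chaining short-circuits when the field (or its labels) is missing.
if (stringField?.labels) {
  allLabels.push(stringField.labels);
}
```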
@@ -279,7 +284,7 @@ export function logSeriesToLogsModel(logSeries: DataFrame[]): LogsModel | undefi
       stringField,
       logLevelField: fieldCache.getFieldByName('level'),
       idField: getIdField(fieldCache),
-    };
+    } as LogFields;
   });

   const commonLabels = allLabels.length > 0 ? findCommonLabels(allLabels) : {};
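Because `stringField` may be `undefined` at this point, the inferred type of the object literal no longer matches the stricter `LogFields` shape, so the commit asserts the type rather than restructuring the mapping. A small sketch of that trade-off, using a hypothetical `LogFieldsLike` interface in place of the real `LogFields`:

```ts
interface Field {
  name: string;
  values: string[];
}

// Hypothetical shape mirroring what the mapper is expected to return.
interface LogFieldsLike {
  stringField: Field;
  logLevelField?: Field;
  idField?: Field;
}

function pickFields(fields: Field[]): LogFieldsLike {
  const stringField = fields.find(f => f.name === 'line'); // Field | undefined
  return {
    stringField,
    logLevelField: fields.find(f => f.name === 'level'),
    idField: fields.find(f => f.name === 'id'),
    // Without the assertion this would not compile: stringField is possibly undefined.
  } as LogFieldsLike;
}
```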
@@ -334,7 +339,7 @@ export function logSeriesToLogsModel(logSeries: DataFrame[]): LogsModel | undefi
         searchWords,
         entry: hasAnsi ? ansicolor.strip(message) : message,
         raw: message,
-        labels: stringField.labels,
+        labels: stringField.labels || {},
         uid: idField ? idField.values.get(j) : j.toString(),
       });
     }
@@ -507,7 +507,7 @@ export enum SortOrder {
 export const refreshIntervalToSortOrder = (refreshInterval?: string) =>
   RefreshPicker.isLive(refreshInterval) ? SortOrder.Ascending : SortOrder.Descending;

-export const sortLogsResult = (logsResult: LogsModel, sortOrder: SortOrder): LogsModel => {
+export const sortLogsResult = (logsResult: LogsModel | null, sortOrder: SortOrder): LogsModel => {
   const rows = logsResult ? logsResult.rows : [];
   sortOrder === SortOrder.Ascending ? rows.sort(sortInAscendingOrder) : rows.sort(sortInDescendingOrder);
   const result: LogsModel = logsResult ? { ...logsResult, rows } : { hasUniqueLabels: false, rows };
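Widening the parameter to `LogsModel | null` pushes the null handling into the helper itself, which already falls back to an empty row list and a default model. A trimmed-down sketch of that behavior with local stand-in types (the real `LogsModel` and sort comparators live elsewhere in the codebase):

```ts
interface LogRow {
  timeEpochMs: number;
  entry: string;
}

interface LogsModel {
  hasUniqueLabels: boolean;
  rows: LogRow[];
}

enum SortOrder {
  Ascending = 'Ascending',
  Descending = 'Descending',
}

const sortLogsResult = (logsResult: LogsModel | null, sortOrder: SortOrder): LogsModel => {
  const rows = logsResult ? logsResult.rows : [];
  rows.sort((a, b) =>
    sortOrder === SortOrder.Ascending ? a.timeEpochMs - b.timeEpochMs : b.timeEpochMs - a.timeEpochMs
  );
  // A null input still yields a usable, empty model.
  return logsResult ? { ...logsResult, rows } : { hasUniqueLabels: false, rows };
};

console.log(sortLogsResult(null, SortOrder.Descending)); // { hasUniqueLabels: false, rows: [] }
```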
@@ -179,7 +179,7 @@ describe('ResultProcessor', () => {
           entry: 'third',
           entryFieldIndex: 2,
           hasAnsi: false,
-          labels: undefined,
+          labels: {},
           logLevel: 'unknown',
           raw: 'third',
           searchWords: [] as string[],
@@ -196,7 +196,7 @@ describe('ResultProcessor', () => {
           entry: 'second message',
           entryFieldIndex: 2,
           hasAnsi: false,
-          labels: undefined,
+          labels: {},
           logLevel: 'unknown',
           raw: 'second message',
           searchWords: [] as string[],
@@ -213,7 +213,7 @@ describe('ResultProcessor', () => {
           entry: 'this is a message',
           entryFieldIndex: 2,
           hasAnsi: false,
-          labels: undefined,
+          labels: {},
           logLevel: 'unknown',
           raw: 'this is a message',
           searchWords: [] as string[],
@@ -21,7 +21,7 @@ export const LogsPanel: React.FunctionComponent<LogsPanelProps> = ({
     );
   }

-  const newResults = data ? dataFrameToLogsModel(data.series, data.request.intervalMs, timeZone) : null;
+  const newResults = data ? dataFrameToLogsModel(data.series, data.request?.intervalMs, timeZone) : null;
   const sortedNewResults = sortLogsResult(newResults, sortOrder);

   return (
@@ -658,7 +658,7 @@ class SingleStatCtrl extends MetricsPanelCtrl {
         window.location.href = linkInfo.href;
       } else {
         $timeout(() => {
-          $location.url(locationUtil.stripBaseFromUrl(linkInfo.href));
+          $location.url(locationUtil.stripBaseFromUrl(linkInfo!.href));
         });
       }

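Inside the deferred `$timeout` callback the compiler no longer trusts the earlier narrowing of `linkInfo` (narrowing of mutable bindings is reset inside closures, since they may run later), so the non-null assertion `!` tells it the value is still defined when the callback fires. A standalone sketch of the same situation, with a hypothetical `deferred` helper standing in for Angular's `$timeout`:

```ts
interface LinkInfo {
  href: string;
}

// Stand-in for the $timeout service used by the panel controller.
function deferred(fn: () => void): void {
  setTimeout(fn, 0);
}

function navigate(linkInfo: LinkInfo | undefined, sameTab: boolean) {
  if (!linkInfo) {
    return;
  }
  if (sameTab) {
    console.log('navigate now to', linkInfo.href); // narrowing still applies here
  } else {
    deferred(() => {
      // Narrowing does not flow into the callback, hence the `!` assertion.
      console.log('navigate later to', linkInfo!.href);
    });
  }
}
```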
@@ -4,7 +4,7 @@ echo -e "Collecting code stats (typescript errors & more)"



-ERROR_COUNT_LIMIT=798
+ERROR_COUNT_LIMIT=795
 DIRECTIVES_LIMIT=172
 CONTROLLERS_LIMIT=139
