Toolkit: Remove plugin:ci-build, plugin:ci-package, plugin:ci-report and related files (#67212)
@@ -885,17 +885,11 @@ exports[`better eslint`] = {
    [0, 0, 0, "Unexpected any. Specify a different type.", "1"],
    [0, 0, 0, "Unexpected any. Specify a different type.", "2"]
  ],
  "packages/grafana-toolkit/src/cli/tasks/plugin.ci.ts:5381": [
    [0, 0, 0, "Do not use any type assertions.", "0"]
  ],
  "packages/grafana-toolkit/src/cli/tasks/plugin.utils.ts:5381": [
    [0, 0, 0, "Unexpected any. Specify a different type.", "0"],
    [0, 0, 0, "Unexpected any. Specify a different type.", "1"],
    [0, 0, 0, "Unexpected any. Specify a different type.", "2"]
  ],
  "packages/grafana-toolkit/src/cli/tasks/plugin/bundle.managed.ts:5381": [
    [0, 0, 0, "Unexpected any. Specify a different type.", "0"]
  ],
  "packages/grafana-toolkit/src/cli/tasks/task.ts:5381": [
    [0, 0, 0, "Unexpected any. Specify a different type.", "0"],
    [0, 0, 0, "Do not use any type assertions.", "1"],
@@ -925,28 +919,6 @@ exports[`better eslint`] = {
    [0, 0, 0, "Do not use any type assertions.", "1"],
    [0, 0, 0, "Unexpected any. Specify a different type.", "2"]
  ],
  "packages/grafana-toolkit/src/plugins/manifest.ts:5381": [
    [0, 0, 0, "Unexpected any. Specify a different type.", "0"],
    [0, 0, 0, "Unexpected any. Specify a different type.", "1"],
    [0, 0, 0, "Do not use any type assertions.", "2"],
    [0, 0, 0, "Unexpected any. Specify a different type.", "3"],
    [0, 0, 0, "Do not use any type assertions.", "4"],
    [0, 0, 0, "Unexpected any. Specify a different type.", "5"],
    [0, 0, 0, "Do not use any type assertions.", "6"],
    [0, 0, 0, "Unexpected any. Specify a different type.", "7"],
    [0, 0, 0, "Do not use any type assertions.", "8"],
    [0, 0, 0, "Unexpected any. Specify a different type.", "9"]
  ],
  "packages/grafana-toolkit/src/plugins/types.ts:5381": [
    [0, 0, 0, "Unexpected any. Specify a different type.", "0"]
  ],
  "packages/grafana-toolkit/src/plugins/utils.ts:5381": [
    [0, 0, 0, "Do not use any type assertions.", "0"]
  ],
  "packages/grafana-toolkit/src/plugins/workflow.ts:5381": [
    [0, 0, 0, "Do not use any type assertions.", "0"],
    [0, 0, 0, "Do not use any type assertions.", "1"]
  ],
  "packages/grafana-ui/src/components/Card/Card.tsx:5381": [
    [0, 0, 0, "Do not use any type assertions.", "0"],
    [0, 0, 0, "Unexpected any. Specify a different type.", "1"]
@@ -94,7 +94,6 @@
    "less": "^4.1.2",
    "less-loader": "^10.2.0",
    "lodash": "^4.17.21",
    "md5-file": "^5.0.0",
    "mini-css-extract-plugin": "^2.6.0",
    "ora": "^5.4.1",
    "postcss": "^8.4.12",
@@ -4,10 +4,7 @@ import { program } from 'commander';
import { nodeVersionCheckerTask } from './tasks/nodeVersionChecker';
import { buildPackageTask } from './tasks/package.build';
import { pluginBuildTask } from './tasks/plugin.build';
import { ciBuildPluginTask, ciPackagePluginTask, ciPluginReportTask } from './tasks/plugin.ci';
import { pluginUpdateTask } from './tasks/plugin.update';
import { getToolkitVersion, githubPublishTask } from './tasks/plugin.utils';
import { bundleManagedTask } from './tasks/plugin/bundle.managed';
import { templateTask } from './tasks/template';
import { toolkitBuildTask } from './tasks/toolkit.build';
import { execTask } from './utils/execTask';
@@ -130,53 +127,6 @@ export const run = (includeInternalScripts = false) => {
      process.exit(1);
    });

  program
    .command('plugin:ci-build')
    .option('--finish', 'move all results to the jobs folder', false)
    .option('--maxJestWorkers <num>|<string>', 'Limit number of Jest workers spawned')
    .description('[deprecated] Build the plugin, leaving results in /dist and /coverage')
    .action(async (cmd) => {
      await execTask(ciBuildPluginTask)({
        finish: cmd.finish,
        maxJestWorkers: cmd.maxJestWorkers,
      });
    });

  program
    .command('plugin:ci-package')
    .option('--signatureType <type>', 'Signature Type')
    .option('--rootUrls <urls...>', 'Root URLs')
    .option('--signing-admin', 'Use the admin API endpoint for signing the manifest. (deprecated)', false)
    .description('[deprecated] Create a zip packages for the plugin')
    .action(async (cmd) => {
      await execTask(ciPackagePluginTask)({
        signatureType: cmd.signatureType,
        rootUrls: cmd.rootUrls,
      });
    });

  program
    .command('plugin:ci-report')
    .description('[deprecated] Build a report for this whole process')
    .option('--upload', 'upload packages also')
    .action(async (cmd) => {
      await execTask(ciPluginReportTask)({
        upload: cmd.upload,
      });
    });

  program
    .command('plugin:bundle-managed')
    .description('[Deprecated] Builds managed plugins')
    .action(async (cmd) => {
      console.log(
        chalk.yellow.bold(
          `⚠️ This command is deprecated and will be removed in v10. No further support will be provided. ⚠️`
        )
      );
      await execTask(bundleManagedTask)({});
    });

  program
    .command('plugin:github-publish')
    .option('--dryrun', 'Do a dry run only', false)
@@ -197,18 +147,6 @@ export const run = (includeInternalScripts = false) => {
      });
    });

  program
    .command('plugin:update-circleci')
    .description('[Deprecated] Update plugin')
    .action(async (cmd) => {
      console.log(
        chalk.yellow.bold(
          `⚠️ This command is deprecated and will be removed in v10. No further support will be provided. ⚠️`
        )
      );
      await execTask(pluginUpdateTask)({});
    });

  program.on('command:*', () => {
    console.error('Invalid command: %s\nSee --help for a list of available commands.', program.args.join(' '));
    process.exit(1);
@@ -1,258 +0,0 @@
import execa = require('execa');
import fs from 'fs-extra';
import path = require('path');
import rimrafCallback from 'rimraf';
import { promisify } from 'util';

import { getPluginId } from '../../config/utils/getPluginId';
import { assertRootUrlIsValid, getPluginJson } from '../../config/utils/pluginValidation';
import {
  getJobFolder,
  writeJobStats,
  getCiFolder,
  getPluginBuildInfo,
  getPullRequestNumber,
  getCircleDownloadBaseURL,
} from '../../plugins/env';
import { buildManifest, signManifest, saveManifest } from '../../plugins/manifest';
import { PluginPackageDetails, PluginBuildReport } from '../../plugins/types';
import { getPackageDetails, getGrafanaVersions, readGitLog } from '../../plugins/utils';
import { agregateWorkflowInfo, agregateCoverageInfo, agregateTestInfo } from '../../plugins/workflow';

import { pluginBuildRunner } from './plugin.build';
import { Task, TaskRunner } from './task';
const rimraf = promisify(rimrafCallback);

export interface PluginCIOptions {
  finish?: boolean;
  upload?: boolean;
  signatureType?: string;
  rootUrls?: string[];
  maxJestWorkers?: string;
}

/**
 * 1. BUILD
 *
 * when platform exists it is building backend, otherwise frontend
 *
 * Each build writes data:
 *  ~/ci/jobs/build_xxx/
 *
 * Anything that should be put into the final zip file should be put in:
 *  ~/ci/jobs/build_xxx/dist
 *
 * @deprecated -- this task was written with a specific circle-ci build in mind. That system
 * has been replaced with Drone, and this is no longer the best practice. Any new work
 * should be defined in the grafana build pipeline tool or drone configs directly.
 */
const buildPluginRunner: TaskRunner<PluginCIOptions> = async ({ finish, maxJestWorkers }) => {
  const start = Date.now();

  if (finish) {
    const workDir = getJobFolder();
    await rimraf(workDir);
    fs.mkdirSync(workDir);

    // Move local folders to the scoped job folder
    for (const name of ['dist', 'coverage']) {
      const dir = path.resolve(process.cwd(), name);
      if (fs.existsSync(dir)) {
        fs.moveSync(dir, path.resolve(workDir, name));
      }
    }
    writeJobStats(start, workDir);
  } else {
    // Do regular build process with coverage
    await pluginBuildRunner({ coverage: true, maxJestWorkers });
  }
};

export const ciBuildPluginTask = new Task<PluginCIOptions>('Build Plugin', buildPluginRunner);

/**
 * 2. Package
 *
 * Take everything from `~/ci/job/{any}/dist` and
 *  1. merge it into: `~/ci/dist`
 *  2. zip it into packages in `~/ci/packages`
 *  3. prepare grafana environment in: `~/ci/grafana-test-env`
 *
 *
 * @deprecated -- this task was written with a specific circle-ci build in mind. That system
 * has been replaced with Drone, and this is no longer the best practice. Any new work
 * should be defined in the grafana build pipeline tool or drone configs directly.
 */
const packagePluginRunner: TaskRunner<PluginCIOptions> = async ({ signatureType, rootUrls }) => {
  const start = Date.now();
  const ciDir = getCiFolder();
  const packagesDir = path.resolve(ciDir, 'packages');
  const distDir = path.resolve(ciDir, 'dist');
  const docsDir = path.resolve(ciDir, 'docs');
  const jobsDir = path.resolve(ciDir, 'jobs');

  fs.exists(jobsDir, (jobsDirExists) => {
    if (!jobsDirExists) {
      throw new Error('You must run plugin:ci-build prior to running plugin:ci-package');
    }
  });

  const grafanaEnvDir = path.resolve(ciDir, 'grafana-test-env');
  await execa('rimraf', [packagesDir, distDir, grafanaEnvDir]);
  fs.mkdirSync(packagesDir);
  fs.mkdirSync(distDir);

  // Updating the dist dir to have a pluginId named directory in it
  // The zip needs to contain the plugin code wrapped in directory with a pluginId name
  const distContentDir = path.resolve(distDir, getPluginId());
  fs.mkdirSync(grafanaEnvDir);

  console.log('Build Dist Folder');

  // 1. Check for a local 'dist' folder
  const d = path.resolve(process.cwd(), 'dist');
  if (fs.existsSync(d)) {
    await execa('cp', ['-rn', d + '/.', distContentDir]);
  }

  // 2. Look for any 'dist' folders under ci/job/XXX/dist
  const dirs = fs.readdirSync(path.resolve(ciDir, 'jobs'));
  for (const j of dirs) {
    const contents = path.resolve(ciDir, 'jobs', j, 'dist');
    if (fs.existsSync(contents)) {
      try {
        await execa('cp', ['-rn', contents + '/.', distContentDir]);
      } catch (er) {
        throw new Error('Duplicate files found in dist folders');
      }
    }
  }

  console.log('Save the source info in plugin.json');
  const pluginJsonFile = path.resolve(distContentDir, 'plugin.json');
  const pluginInfo = getPluginJson(pluginJsonFile);
  pluginInfo.info.build = await getPluginBuildInfo();
  fs.writeFileSync(pluginJsonFile, JSON.stringify(pluginInfo, null, 2), { encoding: 'utf-8' });

  // Write a MANIFEST.txt file in the dist folder
  try {
    const manifest = await buildManifest(distContentDir);
    if (signatureType) {
      manifest.signatureType = signatureType;
    }
    if (rootUrls && rootUrls.length > 0) {
      rootUrls.forEach(assertRootUrlIsValid);
      manifest.rootUrls = rootUrls;
    }
    const signedManifest = await signManifest(manifest);
    await saveManifest(distContentDir, signedManifest);
  } catch (err) {
    console.warn(`Error signing manifest: ${distContentDir}`, err);
  }

  console.log('Building ZIP');
  let zipName = pluginInfo.id + '-' + pluginInfo.info.version + '.zip';
  let zipFile = path.resolve(packagesDir, zipName);
  await execa('zip', ['-r', zipFile, '.'], { cwd: distDir });

  const zipStats = fs.statSync(zipFile);
  if (zipStats.size < 100) {
    throw new Error('Invalid zip file: ' + zipFile);
  }

  // Make a copy so it is easy for report to read
  await execa('cp', [pluginJsonFile, distDir]);

  const info: PluginPackageDetails = {
    plugin: await getPackageDetails(zipFile, distDir),
  };

  console.log('Setup Grafana Environment');
  let p = path.resolve(grafanaEnvDir, 'plugins', pluginInfo.id);
  fs.mkdirSync(p, { recursive: true });
  await execa('unzip', [zipFile, '-d', p]);

  // If docs exist, zip them into packages
  if (fs.existsSync(docsDir)) {
    console.log('Creating documentation zip');
    zipName = pluginInfo.id + '-' + pluginInfo.info.version + '-docs.zip';
    zipFile = path.resolve(packagesDir, zipName);
    await execa('zip', ['-r', zipFile, '.'], { cwd: docsDir });

    info.docs = await getPackageDetails(zipFile, docsDir);
  }

  p = path.resolve(packagesDir, 'info.json');
  fs.writeFileSync(p, JSON.stringify(info, null, 2), { encoding: 'utf-8' });

  // Write the custom settings
  p = path.resolve(grafanaEnvDir, 'custom.ini');
  const customIniBody =
    `# Autogenerated by @grafana/toolkit \n` +
    `[paths] \n` +
    `plugins = ${path.resolve(grafanaEnvDir, 'plugins')}\n` +
    `\n`; // empty line
  fs.writeFileSync(p, customIniBody, { encoding: 'utf-8' });

  writeJobStats(start, getJobFolder());
};

export const ciPackagePluginTask = new Task<PluginCIOptions>('Bundle Plugin', packagePluginRunner);

/**
 * 4. Report
 *
 * Create a report from all the previous steps
 *
 * @deprecated -- this task was written with a specific circle-ci build in mind. That system
 * has been replaced with Drone, and this is no longer the best practice. Any new work
 * should be defined in the grafana build pipeline tool or drone configs directly.
 */
const pluginReportRunner: TaskRunner<PluginCIOptions> = async ({ upload }) => {
  const ciDir = path.resolve(process.cwd(), 'ci');
  const packageDir = path.resolve(ciDir, 'packages');
  const packageInfo = require(path.resolve(packageDir, 'info.json')) as PluginPackageDetails;

  const pluginJsonFile = path.resolve(ciDir, 'dist', 'plugin.json');
  console.log('Load info from: ' + pluginJsonFile);

  const pluginMeta = getPluginJson(pluginJsonFile);
  const report: PluginBuildReport = {
    plugin: pluginMeta,
    packages: packageInfo,
    workflow: agregateWorkflowInfo(),
    coverage: agregateCoverageInfo(),
    tests: agregateTestInfo(),
    artifactsBaseURL: await getCircleDownloadBaseURL(),
    grafanaVersion: getGrafanaVersions(),
    git: await readGitLog(),
  };
  const pr = getPullRequestNumber();
  if (pr) {
    report.pullRequest = pr;
  }

  // Save the report to disk
  const file = path.resolve(ciDir, 'report.json');
  fs.writeFileSync(file, JSON.stringify(report, null, 2), { encoding: 'utf-8' });

  const GRAFANA_API_KEY = process.env.GRAFANA_API_KEY;
  if (!GRAFANA_API_KEY) {
    console.log('Enter a GRAFANA_API_KEY to upload the plugin report');
    return;
  }
  const url = `https://grafana.com/api/plugins/${report.plugin.id}/ci`;

  console.log('Sending report to:', url);
  const axios = require('axios');
  const info = await axios.post(url, report, {
    headers: { Authorization: 'Bearer ' + GRAFANA_API_KEY },
  });
  if (info.status === 200) {
    console.log('OK: ', info.data);
  } else {
    console.warn('Error: ', info);
  }
};

export const ciPluginReportTask = new Task<PluginCIOptions>('Generate Plugin Report', pluginReportRunner);
@@ -1,24 +0,0 @@
import fs = require('fs');
import path = require('path');

import { useSpinner } from '../utils/useSpinner';

import { Task, TaskRunner } from './task';

interface UpdatePluginTask {}

const updateCiConfig = () =>
  useSpinner('Updating CircleCI config', async () => {
    const ciConfigPath = path.join(process.cwd(), '.circleci');
    if (!fs.existsSync(ciConfigPath)) {
      fs.mkdirSync(ciConfigPath);
    }

    const sourceFile = require.resolve('@grafana/toolkit/config/circleci/config.yml');
    const destFile = path.join(ciConfigPath, 'config.yml');
    fs.copyFileSync(sourceFile, destFile);
  });

const pluginUpdateRunner: TaskRunner<UpdatePluginTask> = () => updateCiConfig();

export const pluginUpdateTask = new Task<UpdatePluginTask>('Update Plugin', pluginUpdateRunner);
@@ -1,34 +0,0 @@
import execa = require('execa');
import { promises as fs } from 'fs';

import { Task, TaskRunner } from '../task';

interface BundeManagedOptions {}

const MANAGED_PLUGINS_PATH = `${process.cwd()}/plugins-bundled`;
const MANAGED_PLUGINS_SCOPES = ['internal', 'external'];

const bundleManagedPluginsRunner: TaskRunner<BundeManagedOptions> = async () => {
  await Promise.all(
    MANAGED_PLUGINS_SCOPES.map(async (scope) => {
      try {
        const plugins = await fs.readdir(`${MANAGED_PLUGINS_PATH}/${scope}`);
        if (plugins.length > 0) {
          for (const plugin of plugins) {
            try {
              console.log(`[${scope}]: ${plugin} building...`);
              await execa('yarn', ['build'], { cwd: `${MANAGED_PLUGINS_PATH}/${scope}/${plugin}` });
              console.log(`[${scope}]: ${plugin} bundled`);
            } catch (e: any) {
              console.log(e.stdout);
            }
          }
        }
      } catch (e) {
        console.log(e);
      }
    })
  );
};

export const bundleManagedTask = new Task<BundeManagedOptions>('Bundle managed plugins', bundleManagedPluginsRunner);
@@ -1,106 +1,6 @@
import execa from 'execa';
import fs from 'fs';
import path from 'path';

import { PluginBuildInfo } from '@grafana/data';

import { JobInfo } from './types';

const getJobFromProcessArgv = () => {
  const arg = process.argv[2];
  if (arg && arg.startsWith('plugin:ci-')) {
    const task = arg.substring('plugin:ci-'.length);
    if ('build' === task) {
      if ('--backend' === process.argv[3] && process.argv[4]) {
        return task + '_' + process.argv[4];
      }
      return 'build_plugin';
    }
    return task;
  }
  return 'unknown_job';
};

export const job =
  (process.env.DRONE_STEP_NAME ? process.env.DRONE_STEP_NAME : process.env.CIRCLE_JOB) || getJobFromProcessArgv();

export const getPluginBuildInfo = async (): Promise<PluginBuildInfo> => {
  if (process.env.CI === 'true') {
    let repo: string | undefined;
    let branch: string | undefined;
    let hash: string | undefined;
    let build: number | undefined;
    let pr: number | undefined;
    if (process.env.DRONE === 'true') {
      repo = process.env.DRONE_REPO_LINK;
      branch = process.env.DRONE_BRANCH;
      hash = process.env.DRONE_COMMIT_SHA;
      build = parseInt(process.env.DRONE_BUILD_NUMBER || '', 10);
      pr = parseInt(process.env.DRONE_PULL_REQUEST || '', 10);
    } else if (process.env.CIRCLECI === 'true') {
      repo = process.env.CIRCLE_REPOSITORY_URL;
      branch = process.env.CIRCLE_BRANCH;
      hash = process.env.CIRCLE_SHA1;
      build = parseInt(process.env.CIRCLE_BUILD_NUM || '', 10);
      const url = process.env.CIRCLE_PULL_REQUEST || '';
      const idx = url.lastIndexOf('/') + 1;
      pr = parseInt(url.substring(idx), 10);
    }

    const info: PluginBuildInfo = {
      time: Date.now(),
      repo,
      branch,
      hash,
    };
    if (pr) {
      info.pr = pr;
    }
    if (build) {
      info.number = build;
    }
    return info;
  }

  const branch = await execa('git', ['rev-parse', '--abbrev-ref', 'HEAD']);
  const hash = await execa('git', ['rev-parse', 'HEAD']);
  return {
    time: Date.now(),
    branch: branch.stdout,
    hash: hash.stdout,
  };
};

export const getBuildNumber = (): number | undefined => {
  if (process.env.DRONE === 'true') {
    return parseInt(process.env.DRONE_BUILD_NUMBER || '', 10);
  } else if (process.env.CIRCLECI === 'true') {
    return parseInt(process.env.CIRCLE_BUILD_NUM || '', 10);
  }

  return undefined;
};

export const getPullRequestNumber = (): number | undefined => {
  if (process.env.DRONE === 'true') {
    return parseInt(process.env.DRONE_PULL_REQUEST || '', 10);
  } else if (process.env.CIRCLECI === 'true') {
    const url = process.env.CIRCLE_PULL_REQUEST || '';
    const idx = url.lastIndexOf('/') + 1;
    return parseInt(url.substring(idx), 10);
  }

  return undefined;
};

export const getJobFolder = () => {
  const dir = path.resolve(process.cwd(), 'ci', 'jobs', job);
  if (!fs.existsSync(dir)) {
    fs.mkdirSync(dir, { recursive: true });
  }
  return dir;
};

export const getCiFolder = () => {
  const dir = path.resolve(process.cwd(), 'ci');
  if (!fs.existsSync(dir)) {
@@ -108,40 +8,3 @@ export const getCiFolder = () => {
  }
  return dir;
};

export const writeJobStats = (startTime: number, workDir: string) => {
  const endTime = Date.now();
  const stats: JobInfo = {
    job,
    startTime,
    endTime,
    elapsed: endTime - startTime,
    buildNumber: getBuildNumber(),
  };
  const f = path.resolve(workDir, 'job.json');
  fs.writeFile(f, JSON.stringify(stats, null, 2), (err) => {
    if (err) {
      throw new Error('Unable to stats: ' + f);
    }
  });
};

// https://circleci.com/api/v1.1/project/github/NatelEnergy/grafana-discrete-panel/latest/artifacts
export async function getCircleDownloadBaseURL(): Promise<string | undefined> {
  try {
    const axios = require('axios');
    const repo = process.env.CIRCLE_PROJECT_REPONAME;
    const user = process.env.CIRCLE_PROJECT_USERNAME;
    let url = `https://circleci.com/api/v1.1/project/github/${user}/${repo}/latest/artifacts`;
    const rsp = await axios.get(url);
    for (const s of rsp.data) {
      const { path, url } = s;
      if (url && path && path.endsWith('report.json')) {
        return url.substring(url.length - 'report.json'.length);
      }
    }
  } catch (e) {
    console.log('Error reading CircleCI artifact URL', e);
  }
  return undefined;
}
@@ -1,4 +1 @@
export * from './env';
export * from './utils';
export * from './workflow';
export * from './types';
@@ -1,91 +0,0 @@
import crypto from 'crypto';
import fs from 'fs';
import path from 'path';

import { ManifestInfo } from './types';

const MANIFEST_FILE = 'MANIFEST.txt';

async function* walk(dir: string, baseDir: string): AsyncGenerator<string, any, any> {
  for await (const d of await (fs.promises as any).opendir(dir)) {
    const entry = path.posix.join(dir, d.name);
    if (d.isDirectory()) {
      yield* await walk(entry, baseDir);
    } else if (d.isFile()) {
      yield path.posix.relative(baseDir, entry);
    } else if (d.isSymbolicLink()) {
      const realPath = await (fs.promises as any).realpath(entry);
      if (!realPath.startsWith(baseDir)) {
        throw new Error(
          `symbolic link ${path.posix.relative(
            baseDir,
            entry
          )} targets a file outside of the base directory: ${baseDir}`
        );
      }
      // if resolved symlink target is a file include it in the manifest
      const stats = await (fs.promises as any).stat(realPath);
      if (stats.isFile()) {
        yield path.posix.relative(baseDir, entry);
      }
    }
  }
}

export async function buildManifest(dir: string): Promise<ManifestInfo> {
  const pluginJson = JSON.parse(fs.readFileSync(path.join(dir, 'plugin.json'), { encoding: 'utf8' }));

  const manifest = {
    plugin: pluginJson.id,
    version: pluginJson.info.version,
    files: {},
  } as ManifestInfo;

  for await (const p of await walk(dir, dir)) {
    if (p === MANIFEST_FILE) {
      continue;
    }

    manifest.files[p] = crypto
      .createHash('sha256')
      .update(fs.readFileSync(path.join(dir, p)))
      .digest('hex');
  }

  return manifest;
}

export async function signManifest(manifest: ManifestInfo): Promise<string> {
  const GRAFANA_API_KEY = process.env.GRAFANA_API_KEY;
  if (!GRAFANA_API_KEY) {
    throw new Error('You must enter a GRAFANA_API_KEY to sign the plugin manifest');
  }

  const GRAFANA_COM_URL = process.env.GRAFANA_COM_URL || 'https://grafana.com/api';
  const url = GRAFANA_COM_URL + '/plugins/ci/sign';

  const axios = require('axios');

  try {
    const info = await axios.post(url, manifest, {
      headers: { Authorization: 'Bearer ' + GRAFANA_API_KEY },
    });
    if (info.status !== 200) {
      console.warn('Error: ', info);
      throw new Error('Error signing manifest');
    }

    return info.data;
  } catch (err: any) {
    if (err.response?.data?.message) {
      throw new Error('Error signing manifest: ' + err.response.data.message);
    }

    throw new Error('Error signing manifest: ' + err.message);
  }
}

export async function saveManifest(dir: string, signedManifest: string): Promise<boolean> {
  fs.writeFileSync(path.join(dir, MANIFEST_FILE), signedManifest);
  return true;
}
@@ -1,104 +0,0 @@
import { PluginMeta, KeyValue } from '@grafana/data';

export interface PluginPackageDetails {
  plugin: ZipFileInfo;
  docs?: ZipFileInfo;
}

export interface PluginBuildReport {
  plugin: PluginMeta;
  packages: PluginPackageDetails;
  workflow: WorkflowInfo;
  coverage: CoverageInfo[];
  tests: TestResultsInfo[];
  git?: GitLogInfo;
  pullRequest?: number;
  artifactsBaseURL?: string;
  grafanaVersion?: KeyValue<string>;
}

export interface JobInfo {
  job?: string;
  startTime: number;
  endTime: number;
  elapsed: number;
  status?: string;
  buildNumber?: number;
}

export interface WorkflowInfo extends JobInfo {
  workflowId?: string;
  jobs: JobInfo[];
  user?: string;
  repo?: string;
}

export interface CoverageDetails {
  total: number;
  covered: number;
  skipped: number;
  pct: number;
}

export interface CoverageInfo {
  job: string;
  summary: { [key: string]: CoverageDetails };
  report?: string; // path to report
}

export interface TestResultsInfo {
  job: string;
  grafana?: any;
  error?: string;
  passed: number;
  failed: number;
  screenshots: string[];
}

export interface CountAndSize {
  count: number;
  bytes: number;
}

export interface ExtensionSize {
  [key: string]: CountAndSize;
}

export interface ZipFileInfo {
  name: string;
  size: number;
  contents: ExtensionSize;
  sha1?: string;
  md5?: string;
}

interface UserInfo {
  name: string;
  email: string;
  time?: number;
}

export interface GitLogInfo {
  commit: string;
  tree: string;
  subject: string;
  body?: string;
  notes?: string;
  author: UserInfo;
  commiter: UserInfo;
}

export interface ManifestInfo {
  // time: number; << filled in by the server
  // keyId: string; << filled in by the server
  // signedByOrg: string; << filled in by the server
  // signedByOrgName: string; << filled in by the server
  signatureType?: string; // filled in by the server if not specified
  rootUrls?: string[]; // for private signatures
  plugin: string;
  version: string;
  files: Record<string, string>;
  toolkit?: {
    version: string;
  };
}
@@ -1,132 +0,0 @@
import execa from 'execa';
import fs from 'fs';
import path from 'path';

import { KeyValue } from '@grafana/data';

import { ExtensionSize, ZipFileInfo, GitLogInfo } from './types';

const md5File = require('md5-file');

export function getGrafanaVersions(): KeyValue<string> {
  const dir = path.resolve(process.cwd(), 'node_modules', '@grafana');
  const versions: KeyValue = {};
  try {
    fs.readdirSync(dir).forEach((file) => {
      const json = require(path.resolve(dir, file, 'package.json'));
      versions[file] = json.version;
    });
  } catch (err) {
    console.warn('Error reading toolkit versions', err);
  }
  return versions;
}

export function getFileSizeReportInFolder(dir: string, info?: ExtensionSize): ExtensionSize {
  const acc: ExtensionSize = info ? info : {};

  const files = fs.readdirSync(dir);
  if (files) {
    files.forEach((file) => {
      const newbase = path.join(dir, file);
      const stat = fs.statSync(newbase);
      if (stat.isDirectory()) {
        getFileSizeReportInFolder(newbase, info);
      } else {
        let ext = '_none_';
        const idx = file.lastIndexOf('.');
        if (idx > 0) {
          ext = file.substring(idx + 1).toLowerCase();
        }
        const current = acc[ext];
        if (current) {
          current.count += 1;
          current.bytes += stat.size;
        } else {
          acc[ext] = { bytes: stat.size, count: 1 };
        }
      }
    });
  }
  return acc;
}

export async function getPackageDetails(zipFile: string, zipSrc: string, writeChecksum = true): Promise<ZipFileInfo> {
  const zipStats = fs.statSync(zipFile);
  if (zipStats.size < 100) {
    throw new Error('Invalid zip file: ' + zipFile);
  }
  const info: ZipFileInfo = {
    name: path.basename(zipFile),
    size: zipStats.size,
    contents: getFileSizeReportInFolder(zipSrc),
  };
  try {
    const exe = await execa('shasum', [zipFile]);
    const idx = exe.stdout.indexOf(' ');
    const sha1 = exe.stdout.substring(0, idx);
    if (writeChecksum) {
      fs.writeFile(zipFile + '.sha1', sha1, (err) => {});
    }
    info.sha1 = sha1;
  } catch {
    console.warn('Unable to read SHA1 Checksum');
  }
  try {
    info.md5 = md5File.sync(zipFile);
  } catch {
    console.warn('Unable to read MD5 Checksum');
  }
  return info;
}

export function findImagesInFolder(dir: string, prefix = '', append?: string[]): string[] {
  const imgs = append || [];

  const files = fs.readdirSync(dir);
  if (files) {
    files.forEach((file) => {
      if (file.endsWith('.png')) {
        imgs.push(file);
      }
    });
  }

  return imgs;
}

export async function readGitLog(): Promise<GitLogInfo | undefined> {
  try {
    let exe = await execa('git', [
      'log',
      '-1', // last line
      '--pretty=format:{%n "commit": "%H",%n "tree": "%T",%n "subject": "%s",%n "author": {%n "name": "%aN",%n "email": "%aE",%n "time":"%at" },%n "commiter": {%n "name": "%cN",%n "email": "%cE",%n "time":"%ct" }%n}',
    ]);
    const info = JSON.parse(exe.stdout) as GitLogInfo;

    // Read the body
    exe = await execa('git', [
      'log',
      '-1', // last line
      '--pretty=format:%b', // Just the body (with newlines!)
    ]);
    if (exe.stdout && exe.stdout.length) {
      info.body = exe.stdout.trim();
    }

    // Read any commit notes
    exe = await execa('git', [
      'log',
      '-1', // last line
      '--pretty=format:%N', // commit notes (with newlines!)
    ]);
    if (exe.stdout && exe.stdout.length) {
      info.notes = exe.stdout.trim();
    }

    return info;
  } catch (err) {
    console.warn('Error REading Git log info', err);
  }
  return undefined;
}
@@ -1,100 +0,0 @@
import fs from 'fs';
import path from 'path';

import { getBuildNumber, getCiFolder } from './env';
import { JobInfo, WorkflowInfo, CoverageInfo, TestResultsInfo } from './types';

export const agregateWorkflowInfo = (): WorkflowInfo => {
  const now = Date.now();
  const workflow: WorkflowInfo = {
    jobs: [],
    startTime: now,
    endTime: now,
    workflowId: process.env.CIRCLE_WORKFLOW_ID,
    repo: process.env.CIRCLE_PROJECT_REPONAME,
    user: process.env.CIRCLE_PROJECT_USERNAME,
    buildNumber: getBuildNumber(),
    elapsed: 0,
  };

  const jobsFolder = path.resolve(getCiFolder(), 'jobs');
  if (fs.existsSync(jobsFolder)) {
    const files = fs.readdirSync(jobsFolder);
    if (files && files.length) {
      files.forEach((file) => {
        const p = path.resolve(jobsFolder, file, 'job.json');
        if (fs.existsSync(p)) {
          const job = require(p) as JobInfo;
          workflow.jobs.push(job);
          if (job.startTime < workflow.startTime) {
            workflow.startTime = job.startTime;
          }
          if (job.endTime > workflow.endTime) {
            workflow.endTime = job.endTime;
          }
        } else {
          console.log('Missing Job info: ', p);
        }
      });
    } else {
      console.log('NO JOBS IN: ', jobsFolder);
    }
  }

  workflow.elapsed = workflow.endTime - workflow.startTime;
  return workflow;
};

export const agregateCoverageInfo = (): CoverageInfo[] => {
  const coverage: CoverageInfo[] = [];
  const ciDir = getCiFolder();
  const jobsFolder = path.resolve(ciDir, 'jobs');
  if (fs.existsSync(jobsFolder)) {
    const files = fs.readdirSync(jobsFolder);
    if (files && files.length) {
      files.forEach((file) => {
        const dir = path.resolve(jobsFolder, file, 'coverage');
        if (fs.existsSync(dir)) {
          const s = path.resolve(dir, 'coverage-summary.json');
          const r = path.resolve(dir, 'lcov-report', 'index.html');
          if (fs.existsSync(s)) {
            const raw = require(s);
            const info: CoverageInfo = {
              job: file,
              summary: raw.total,
            };
            if (fs.existsSync(r)) {
              info.report = r.substring(ciDir.length);
            }
            coverage.push(info);
          }
        }
      });
    } else {
      console.log('NO JOBS IN: ', jobsFolder);
    }
  }
  return coverage;
};

export const agregateTestInfo = (): TestResultsInfo[] => {
  const tests: TestResultsInfo[] = [];
  const ciDir = getCiFolder();
  const jobsFolder = path.resolve(ciDir, 'jobs');
  if (fs.existsSync(jobsFolder)) {
    const files = fs.readdirSync(jobsFolder);
    if (files && files.length) {
      files.forEach((file) => {
        if (file.startsWith('test')) {
          const summary = path.resolve(jobsFolder, file, 'results.json');
          if (fs.existsSync(summary)) {
            tests.push(require(summary) as TestResultsInfo);
          }
        }
      });
    } else {
      console.log('NO Jobs IN: ', jobsFolder);
    }
  }
  return tests;
};
yarn.lock
@@ -3436,7 +3436,6 @@ __metadata:
    less: ^4.1.2
    less-loader: ^10.2.0
    lodash: ^4.17.21
    md5-file: ^5.0.0
    mini-css-extract-plugin: ^2.6.0
    ora: ^5.4.1
    postcss: ^8.4.12
@@ -25384,15 +25383,6 @@ __metadata:
  languageName: node
  linkType: hard

"md5-file@npm:^5.0.0":
  version: 5.0.0
  resolution: "md5-file@npm:5.0.0"
  bin:
    md5-file: cli.js
  checksum: c606a00ff58adf5428e8e2f36d86e5d3c7029f9688126faca302cd83b5e92cac183a62e1d1f05fae7c2614e80f993326fd0a8d6a3a913c41ec7ea0eefc25aa76
  languageName: node
  linkType: hard

"mdast-squeeze-paragraphs@npm:^4.0.0":
  version: 4.0.0
  resolution: "mdast-squeeze-paragraphs@npm:4.0.0"