mirror of https://github.com/grafana/grafana
@grafana/toolkit: integrate latest improvements (#18168)
* @grafana/toolkit: make ts-loader ignore files that are not bundled

* @grafana/toolkit: improve the circleci task (#18133)

  This PR makes some minor improvements to the circle task:
  - Adds build info to plugin.json
  - Adds dependencies to deployed artifacts
  - Makes sure prettier has content before writing (avoids writing empty files)
  - Renames 'bundle' to 'package' and saves the output in a 'packages' file

* @grafana/toolkit: enable plugin themes to work with common stylesheet (#18160)

  This makes it possible to use theme stylesheet files and stylesheet imports at the same time. The problem occurred when trying to migrate the polystat panel to toolkit: grafana/grafana-polystat-panel#62
parent 8202fa2fde
commit 2c8809d3cf
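
For context on the "adds build info to plugin.json" item, here is a minimal sketch of how that could look, built on the getPluginSourceInfo helper added below. The merge key (info.build), the dist path, and the function name are assumptions for illustration, not code from this commit.

import fs = require('fs');
import path = require('path');
import { getPluginSourceInfo } from './env'; // module path assumed

// Hypothetical step: merge source/build info into the packaged plugin.json
export const addBuildInfoToPluginJson = async (distDir: string) => {
  const pluginJsonPath = path.resolve(distDir, 'plugin.json');
  const pluginJson = JSON.parse(fs.readFileSync(pluginJsonPath, 'utf8'));
  pluginJson.info = {
    ...pluginJson.info,
    build: await getPluginSourceInfo(), // { time, repo, branch, hash }
  };
  fs.writeFileSync(pluginJsonPath, JSON.stringify(pluginJson, null, 2));
};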
@@ -0,0 +1,214 @@
import execa = require('execa');
import path = require('path');
import fs = require('fs');

export interface PluginSourceInfo {
  time?: number;
  repo?: string;
  branch?: string;
  hash?: string;
}

export interface JobInfo {
  job?: string;
  startTime: number;
  endTime: number;
  elapsed: number;
  status?: string;
  buildNumber?: number;
}

export interface WorkflowInfo extends JobInfo {
  workflowId?: string;
  jobs: JobInfo[];
  user?: string;
  repo?: string;
}

// Derive the job name from the CLI arguments when not running in CircleCI
const getJobFromProcessArgv = () => {
  const arg = process.argv[2];
  if (arg && arg.startsWith('plugin:ci-')) {
    const task = arg.substring('plugin:ci-'.length);
    if ('build' === task) {
      if ('--backend' === process.argv[3] && process.argv[4]) {
        return task + '_' + process.argv[4];
      }
      return 'build_plugin';
    }
    return task;
  }
  return 'unknown_job';
};

export const job = process.env.CIRCLE_JOB || getJobFromProcessArgv();

export const getPluginSourceInfo = async (): Promise<PluginSourceInfo> => {
  if (process.env.CIRCLE_SHA1) {
    return Promise.resolve({
      time: Date.now(),
      repo: process.env.CIRCLE_REPOSITORY_URL,
      branch: process.env.CIRCLE_BRANCH,
      hash: process.env.CIRCLE_SHA1,
    });
  }
  const exe = await execa('git', ['rev-parse', 'HEAD']);
  return {
    time: Date.now(),
    hash: exe.stdout,
  };
};

const getBuildNumber = (): number | undefined => {
  if (process.env.CIRCLE_BUILD_NUM) {
    return parseInt(process.env.CIRCLE_BUILD_NUM, 10);
  }
  return undefined;
};

export const getJobFolder = () => {
  const dir = path.resolve(process.cwd(), 'ci', 'jobs', job);
  if (!fs.existsSync(dir)) {
    fs.mkdirSync(dir, { recursive: true });
  }
  return dir;
};

export const getCiFolder = () => {
  const dir = path.resolve(process.cwd(), 'ci');
  if (!fs.existsSync(dir)) {
    fs.mkdirSync(dir, { recursive: true });
  }
  return dir;
};

export const writeJobStats = (startTime: number, workDir: string) => {
  const endTime = Date.now();
  const stats: JobInfo = {
    job,
    startTime,
    endTime,
    elapsed: endTime - startTime,
    buildNumber: getBuildNumber(),
  };
  const f = path.resolve(workDir, 'job.json');
  fs.writeFile(f, JSON.stringify(stats, null, 2), err => {
    if (err) {
      throw new Error('Unable to write stats: ' + f);
    }
  });
};

export const agregateWorkflowInfo = (): WorkflowInfo => {
  const now = Date.now();
  const workflow: WorkflowInfo = {
    jobs: [],
    startTime: now,
    endTime: now,
    workflowId: process.env.CIRCLE_WORKFLOW_ID,
    repo: process.env.CIRCLE_PROJECT_REPONAME,
    user: process.env.CIRCLE_PROJECT_USERNAME,
    buildNumber: getBuildNumber(),
    elapsed: 0,
  };

  const jobsFolder = path.resolve(getCiFolder(), 'jobs');
  if (fs.existsSync(jobsFolder)) {
    const files = fs.readdirSync(jobsFolder);
    if (files && files.length) {
      files.forEach(file => {
        const p = path.resolve(jobsFolder, file, 'job.json');
        if (fs.existsSync(p)) {
          const job = require(p) as JobInfo;
          workflow.jobs.push(job);
          if (job.startTime < workflow.startTime) {
            workflow.startTime = job.startTime;
          }
          if (job.endTime > workflow.endTime) {
            workflow.endTime = job.endTime;
          }
        } else {
          console.log('Missing Job info: ', p);
        }
      });
    } else {
      console.log('NO JOBS IN: ', jobsFolder);
    }
  }

  workflow.elapsed = workflow.endTime - workflow.startTime;
  return workflow;
};

export interface CoverageDetails {
  total: number;
  covered: number;
  skipped: number;
  pct: number;
}

export interface CoverageInfo {
  job: string;
  summary: { [key: string]: CoverageDetails };
  report?: string; // path to report
}

export const agregateCoverageInfo = (): CoverageInfo[] => {
  const coverage: CoverageInfo[] = [];
  const ciDir = getCiFolder();
  const jobsFolder = path.resolve(ciDir, 'jobs');
  if (fs.existsSync(jobsFolder)) {
    const files = fs.readdirSync(jobsFolder);
    if (files && files.length) {
      files.forEach(file => {
        const dir = path.resolve(jobsFolder, file, 'coverage');
        if (fs.existsSync(dir)) {
          const s = path.resolve(dir, 'coverage-summary.json');
          const r = path.resolve(dir, 'lcov-report', 'index.html');
          if (fs.existsSync(s)) {
            const raw = require(s);
            const info: CoverageInfo = {
              job: file,
              summary: raw.total,
            };
            if (fs.existsSync(r)) {
              info.report = r.substring(ciDir.length);
            }
            coverage.push(info);
          }
        }
      });
    } else {
      console.log('NO JOBS IN: ', jobsFolder);
    }
  }
  return coverage;
};

export interface TestResultInfo {
  job: string;
  grafana?: any;
  status?: string;
  error?: string;
}

export const agregateTestInfo = (): TestResultInfo[] => {
  const tests: TestResultInfo[] = [];
  const ciDir = getCiFolder();
  const jobsFolder = path.resolve(ciDir, 'jobs');
  if (fs.existsSync(jobsFolder)) {
    const files = fs.readdirSync(jobsFolder);
    if (files && files.length) {
      files.forEach(file => {
        if (file.startsWith('test')) {
          const summary = path.resolve(jobsFolder, file, 'results.json');
          if (fs.existsSync(summary)) {
            tests.push(require(summary) as TestResultInfo);
          }
        }
      });
    } else {
      console.log('NO JOBS IN: ', jobsFolder);
    }
  }
  return tests;
};
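
A hedged sketch of how a CI task might use these helpers end to end; the wiring below (report file name, where the task body runs) is assumed for illustration and is not part of this commit. Each plugin:ci-* job records its own stats, and a final step aggregates everything written under ci/jobs/.

import path = require('path');
import fs = require('fs');
import {
  getJobFolder,
  getCiFolder,
  writeJobStats,
  agregateWorkflowInfo,
  agregateCoverageInfo,
  agregateTestInfo,
} from './env'; // module path assumed

const startTime = Date.now();
const workDir = getJobFolder();
// ... run the actual plugin:ci-* task here, writing its output into workDir ...
writeJobStats(startTime, workDir);

// In a final report step, combine the per-job output written under ci/jobs/
const report = {
  workflow: agregateWorkflowInfo(),
  coverage: agregateCoverageInfo(),
  tests: agregateTestInfo(),
};
fs.writeFileSync(path.resolve(getCiFolder(), 'report.json'), JSON.stringify(report, null, 2));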
@@ -0,0 +1,72 @@
import execa = require('execa');
import path = require('path');
import fs = require('fs');

interface ExtensionBytes {
  [key: string]: number;
}

export function getFileSizeReportInFolder(dir: string, info?: ExtensionBytes): ExtensionBytes {
  if (!info) {
    info = {};
  }

  const files = fs.readdirSync(dir);
  if (files) {
    files.forEach(file => {
      const newbase = path.join(dir, file);
      const stat = fs.statSync(newbase);
      if (stat.isDirectory()) {
        getFileSizeReportInFolder(newbase, info);
      } else {
        let ext = '<other>';
        const idx = file.lastIndexOf('.');
        if (idx > 0) {
          ext = file.substring(idx + 1).toLowerCase();
        }
        const current = info![ext] || 0;
        info![ext] = current + stat.size;
      }
    });
  }
  return info;
}

interface ZipFileInfo {
  name: string;
  size: number;
  contents: ExtensionBytes;
  sha1?: string;
  md5?: string;
}

export async function getPackageDetails(zipFile: string, zipSrc: string, writeChecksum = true): Promise<ZipFileInfo> {
  const zipStats = fs.statSync(zipFile);
  if (zipStats.size < 100) {
    throw new Error('Invalid zip file: ' + zipFile);
  }
  const info: ZipFileInfo = {
    name: path.basename(zipFile),
    size: zipStats.size,
    contents: getFileSizeReportInFolder(zipSrc),
  };
  try {
    const exe = await execa('shasum', [zipFile]);
    const idx = exe.stdout.indexOf(' ');
    const sha1 = exe.stdout.substring(0, idx);
    if (writeChecksum) {
      fs.writeFile(zipFile + '.sha1', sha1, err => {});
    }
    info.sha1 = sha1;
  } catch {
    console.warn('Unable to read SHA1 Checksum');
  }
  try {
    const exe = await execa('md5sum', [zipFile]);
    const idx = exe.stdout.indexOf(' ');
    info.md5 = exe.stdout.substring(0, idx);
  } catch {
    console.warn('Unable to read MD5 Checksum');
  }
  return info;
}
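
Likewise, a small sketch of how the packaging step might use getPackageDetails to describe a zipped artifact; the artifact and output file names below are hypothetical, not taken from this commit.

import path = require('path');
import fs = require('fs');
import { getPackageDetails } from './utils'; // module path assumed

const packagesDir = path.resolve('ci', 'packages'); // assumed artifact location
const zipFile = path.resolve(packagesDir, 'plugin.zip'); // hypothetical artifact name

getPackageDetails(zipFile, path.resolve('dist')).then(info => {
  // info.contents holds bytes per file extension; sha1/md5 are set when shasum/md5sum exist
  fs.writeFileSync(path.resolve(packagesDir, 'info.json'), JSON.stringify(info, null, 2));
});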