mirror of https://github.com/grafana/grafana
Toolkit: Remove `plugin:ci-build` `plugin:ci-package` `plugin:ci-report` and related files (#67212)
parent
e899d2bc7e
commit
22713186cb
@ -1,258 +0,0 @@ |
||||
import execa = require('execa'); |
||||
import fs from 'fs-extra'; |
||||
import path = require('path'); |
||||
import rimrafCallback from 'rimraf'; |
||||
import { promisify } from 'util'; |
||||
|
||||
import { getPluginId } from '../../config/utils/getPluginId'; |
||||
import { assertRootUrlIsValid, getPluginJson } from '../../config/utils/pluginValidation'; |
||||
import { |
||||
getJobFolder, |
||||
writeJobStats, |
||||
getCiFolder, |
||||
getPluginBuildInfo, |
||||
getPullRequestNumber, |
||||
getCircleDownloadBaseURL, |
||||
} from '../../plugins/env'; |
||||
import { buildManifest, signManifest, saveManifest } from '../../plugins/manifest'; |
||||
import { PluginPackageDetails, PluginBuildReport } from '../../plugins/types'; |
||||
import { getPackageDetails, getGrafanaVersions, readGitLog } from '../../plugins/utils'; |
||||
import { agregateWorkflowInfo, agregateCoverageInfo, agregateTestInfo } from '../../plugins/workflow'; |
||||
|
||||
import { pluginBuildRunner } from './plugin.build'; |
||||
import { Task, TaskRunner } from './task'; |
||||
const rimraf = promisify(rimrafCallback); |
||||
|
||||
export interface PluginCIOptions { |
||||
finish?: boolean; |
||||
upload?: boolean; |
||||
signatureType?: string; |
||||
rootUrls?: string[]; |
||||
maxJestWorkers?: string; |
||||
} |
||||
|
||||
/** |
||||
* 1. BUILD |
||||
* |
||||
* when platform exists it is building backend, otherwise frontend |
||||
* |
||||
* Each build writes data: |
||||
* ~/ci/jobs/build_xxx/ |
||||
* |
||||
* Anything that should be put into the final zip file should be put in: |
||||
* ~/ci/jobs/build_xxx/dist |
||||
* |
||||
* @deprecated -- this task was written with a specific circle-ci build in mind. That system |
||||
* has been replaced with Drone, and this is no longer the best practice. Any new work |
||||
* should be defined in the grafana build pipeline tool or drone configs directly. |
||||
*/ |
||||
const buildPluginRunner: TaskRunner<PluginCIOptions> = async ({ finish, maxJestWorkers }) => { |
||||
const start = Date.now(); |
||||
|
||||
if (finish) { |
||||
const workDir = getJobFolder(); |
||||
await rimraf(workDir); |
||||
fs.mkdirSync(workDir); |
||||
|
||||
// Move local folders to the scoped job folder
|
||||
for (const name of ['dist', 'coverage']) { |
||||
const dir = path.resolve(process.cwd(), name); |
||||
if (fs.existsSync(dir)) { |
||||
fs.moveSync(dir, path.resolve(workDir, name)); |
||||
} |
||||
} |
||||
writeJobStats(start, workDir); |
||||
} else { |
||||
// Do regular build process with coverage
|
||||
await pluginBuildRunner({ coverage: true, maxJestWorkers }); |
||||
} |
||||
}; |
||||
|
||||
export const ciBuildPluginTask = new Task<PluginCIOptions>('Build Plugin', buildPluginRunner); |
||||
|
||||
/** |
||||
* 2. Package |
||||
* |
||||
* Take everything from `~/ci/job/{any}/dist` and |
||||
* 1. merge it into: `~/ci/dist` |
||||
* 2. zip it into packages in `~/ci/packages` |
||||
* 3. prepare grafana environment in: `~/ci/grafana-test-env` |
||||
* |
||||
* |
||||
* @deprecated -- this task was written with a specific circle-ci build in mind. That system |
||||
* has been replaced with Drone, and this is no longer the best practice. Any new work |
||||
* should be defined in the grafana build pipeline tool or drone configs directly. |
||||
*/ |
||||
const packagePluginRunner: TaskRunner<PluginCIOptions> = async ({ signatureType, rootUrls }) => { |
||||
const start = Date.now(); |
||||
const ciDir = getCiFolder(); |
||||
const packagesDir = path.resolve(ciDir, 'packages'); |
||||
const distDir = path.resolve(ciDir, 'dist'); |
||||
const docsDir = path.resolve(ciDir, 'docs'); |
||||
const jobsDir = path.resolve(ciDir, 'jobs'); |
||||
|
||||
fs.exists(jobsDir, (jobsDirExists) => { |
||||
if (!jobsDirExists) { |
||||
throw new Error('You must run plugin:ci-build prior to running plugin:ci-package'); |
||||
} |
||||
}); |
||||
|
||||
const grafanaEnvDir = path.resolve(ciDir, 'grafana-test-env'); |
||||
await execa('rimraf', [packagesDir, distDir, grafanaEnvDir]); |
||||
fs.mkdirSync(packagesDir); |
||||
fs.mkdirSync(distDir); |
||||
|
||||
// Updating the dist dir to have a pluginId named directory in it
|
||||
// The zip needs to contain the plugin code wrapped in directory with a pluginId name
|
||||
const distContentDir = path.resolve(distDir, getPluginId()); |
||||
fs.mkdirSync(grafanaEnvDir); |
||||
|
||||
console.log('Build Dist Folder'); |
||||
|
||||
// 1. Check for a local 'dist' folder
|
||||
const d = path.resolve(process.cwd(), 'dist'); |
||||
if (fs.existsSync(d)) { |
||||
await execa('cp', ['-rn', d + '/.', distContentDir]); |
||||
} |
||||
|
||||
// 2. Look for any 'dist' folders under ci/job/XXX/dist
|
||||
const dirs = fs.readdirSync(path.resolve(ciDir, 'jobs')); |
||||
for (const j of dirs) { |
||||
const contents = path.resolve(ciDir, 'jobs', j, 'dist'); |
||||
if (fs.existsSync(contents)) { |
||||
try { |
||||
await execa('cp', ['-rn', contents + '/.', distContentDir]); |
||||
} catch (er) { |
||||
throw new Error('Duplicate files found in dist folders'); |
||||
} |
||||
} |
||||
} |
||||
|
||||
console.log('Save the source info in plugin.json'); |
||||
const pluginJsonFile = path.resolve(distContentDir, 'plugin.json'); |
||||
const pluginInfo = getPluginJson(pluginJsonFile); |
||||
pluginInfo.info.build = await getPluginBuildInfo(); |
||||
fs.writeFileSync(pluginJsonFile, JSON.stringify(pluginInfo, null, 2), { encoding: 'utf-8' }); |
||||
|
||||
// Write a MANIFEST.txt file in the dist folder
|
||||
try { |
||||
const manifest = await buildManifest(distContentDir); |
||||
if (signatureType) { |
||||
manifest.signatureType = signatureType; |
||||
} |
||||
if (rootUrls && rootUrls.length > 0) { |
||||
rootUrls.forEach(assertRootUrlIsValid); |
||||
manifest.rootUrls = rootUrls; |
||||
} |
||||
const signedManifest = await signManifest(manifest); |
||||
await saveManifest(distContentDir, signedManifest); |
||||
} catch (err) { |
||||
console.warn(`Error signing manifest: ${distContentDir}`, err); |
||||
} |
||||
|
||||
console.log('Building ZIP'); |
||||
let zipName = pluginInfo.id + '-' + pluginInfo.info.version + '.zip'; |
||||
let zipFile = path.resolve(packagesDir, zipName); |
||||
await execa('zip', ['-r', zipFile, '.'], { cwd: distDir }); |
||||
|
||||
const zipStats = fs.statSync(zipFile); |
||||
if (zipStats.size < 100) { |
||||
throw new Error('Invalid zip file: ' + zipFile); |
||||
} |
||||
|
||||
// Make a copy so it is easy for report to read
|
||||
await execa('cp', [pluginJsonFile, distDir]); |
||||
|
||||
const info: PluginPackageDetails = { |
||||
plugin: await getPackageDetails(zipFile, distDir), |
||||
}; |
||||
|
||||
console.log('Setup Grafana Environment'); |
||||
let p = path.resolve(grafanaEnvDir, 'plugins', pluginInfo.id); |
||||
fs.mkdirSync(p, { recursive: true }); |
||||
await execa('unzip', [zipFile, '-d', p]); |
||||
|
||||
// If docs exist, zip them into packages
|
||||
if (fs.existsSync(docsDir)) { |
||||
console.log('Creating documentation zip'); |
||||
zipName = pluginInfo.id + '-' + pluginInfo.info.version + '-docs.zip'; |
||||
zipFile = path.resolve(packagesDir, zipName); |
||||
await execa('zip', ['-r', zipFile, '.'], { cwd: docsDir }); |
||||
|
||||
info.docs = await getPackageDetails(zipFile, docsDir); |
||||
} |
||||
|
||||
p = path.resolve(packagesDir, 'info.json'); |
||||
fs.writeFileSync(p, JSON.stringify(info, null, 2), { encoding: 'utf-8' }); |
||||
|
||||
// Write the custom settings
|
||||
p = path.resolve(grafanaEnvDir, 'custom.ini'); |
||||
const customIniBody = |
||||
`# Autogenerated by @grafana/toolkit \n` + |
||||
`[paths] \n` + |
||||
`plugins = ${path.resolve(grafanaEnvDir, 'plugins')}\n` + |
||||
`\n`; // empty line
|
||||
fs.writeFileSync(p, customIniBody, { encoding: 'utf-8' }); |
||||
|
||||
writeJobStats(start, getJobFolder()); |
||||
}; |
||||
|
||||
export const ciPackagePluginTask = new Task<PluginCIOptions>('Bundle Plugin', packagePluginRunner); |
||||
|
||||
/** |
||||
* 4. Report |
||||
* |
||||
* Create a report from all the previous steps |
||||
* |
||||
* @deprecated -- this task was written with a specific circle-ci build in mind. That system |
||||
* has been replaced with Drone, and this is no longer the best practice. Any new work |
||||
* should be defined in the grafana build pipeline tool or drone configs directly. |
||||
*/ |
||||
const pluginReportRunner: TaskRunner<PluginCIOptions> = async ({ upload }) => { |
||||
const ciDir = path.resolve(process.cwd(), 'ci'); |
||||
const packageDir = path.resolve(ciDir, 'packages'); |
||||
const packageInfo = require(path.resolve(packageDir, 'info.json')) as PluginPackageDetails; |
||||
|
||||
const pluginJsonFile = path.resolve(ciDir, 'dist', 'plugin.json'); |
||||
console.log('Load info from: ' + pluginJsonFile); |
||||
|
||||
const pluginMeta = getPluginJson(pluginJsonFile); |
||||
const report: PluginBuildReport = { |
||||
plugin: pluginMeta, |
||||
packages: packageInfo, |
||||
workflow: agregateWorkflowInfo(), |
||||
coverage: agregateCoverageInfo(), |
||||
tests: agregateTestInfo(), |
||||
artifactsBaseURL: await getCircleDownloadBaseURL(), |
||||
grafanaVersion: getGrafanaVersions(), |
||||
git: await readGitLog(), |
||||
}; |
||||
const pr = getPullRequestNumber(); |
||||
if (pr) { |
||||
report.pullRequest = pr; |
||||
} |
||||
|
||||
// Save the report to disk
|
||||
const file = path.resolve(ciDir, 'report.json'); |
||||
fs.writeFileSync(file, JSON.stringify(report, null, 2), { encoding: 'utf-8' }); |
||||
|
||||
const GRAFANA_API_KEY = process.env.GRAFANA_API_KEY; |
||||
if (!GRAFANA_API_KEY) { |
||||
console.log('Enter a GRAFANA_API_KEY to upload the plugin report'); |
||||
return; |
||||
} |
||||
const url = `https://grafana.com/api/plugins/${report.plugin.id}/ci`; |
||||
|
||||
console.log('Sending report to:', url); |
||||
const axios = require('axios'); |
||||
const info = await axios.post(url, report, { |
||||
headers: { Authorization: 'Bearer ' + GRAFANA_API_KEY }, |
||||
}); |
||||
if (info.status === 200) { |
||||
console.log('OK: ', info.data); |
||||
} else { |
||||
console.warn('Error: ', info); |
||||
} |
||||
}; |
||||
|
||||
export const ciPluginReportTask = new Task<PluginCIOptions>('Generate Plugin Report', pluginReportRunner); |
@ -1,24 +0,0 @@ |
||||
import fs = require('fs'); |
||||
import path = require('path'); |
||||
|
||||
import { useSpinner } from '../utils/useSpinner'; |
||||
|
||||
import { Task, TaskRunner } from './task'; |
||||
|
||||
interface UpdatePluginTask {} |
||||
|
||||
const updateCiConfig = () => |
||||
useSpinner('Updating CircleCI config', async () => { |
||||
const ciConfigPath = path.join(process.cwd(), '.circleci'); |
||||
if (!fs.existsSync(ciConfigPath)) { |
||||
fs.mkdirSync(ciConfigPath); |
||||
} |
||||
|
||||
const sourceFile = require.resolve('@grafana/toolkit/config/circleci/config.yml'); |
||||
const destFile = path.join(ciConfigPath, 'config.yml'); |
||||
fs.copyFileSync(sourceFile, destFile); |
||||
}); |
||||
|
||||
const pluginUpdateRunner: TaskRunner<UpdatePluginTask> = () => updateCiConfig(); |
||||
|
||||
export const pluginUpdateTask = new Task<UpdatePluginTask>('Update Plugin', pluginUpdateRunner); |
@ -1,34 +0,0 @@ |
||||
import execa = require('execa'); |
||||
import { promises as fs } from 'fs'; |
||||
|
||||
import { Task, TaskRunner } from '../task'; |
||||
|
||||
interface BundeManagedOptions {} |
||||
|
||||
const MANAGED_PLUGINS_PATH = `${process.cwd()}/plugins-bundled`; |
||||
const MANAGED_PLUGINS_SCOPES = ['internal', 'external']; |
||||
|
||||
const bundleManagedPluginsRunner: TaskRunner<BundeManagedOptions> = async () => { |
||||
await Promise.all( |
||||
MANAGED_PLUGINS_SCOPES.map(async (scope) => { |
||||
try { |
||||
const plugins = await fs.readdir(`${MANAGED_PLUGINS_PATH}/${scope}`); |
||||
if (plugins.length > 0) { |
||||
for (const plugin of plugins) { |
||||
try { |
||||
console.log(`[${scope}]: ${plugin} building...`); |
||||
await execa('yarn', ['build'], { cwd: `${MANAGED_PLUGINS_PATH}/${scope}/${plugin}` }); |
||||
console.log(`[${scope}]: ${plugin} bundled`); |
||||
} catch (e: any) { |
||||
console.log(e.stdout); |
||||
} |
||||
} |
||||
} |
||||
} catch (e) { |
||||
console.log(e); |
||||
} |
||||
}) |
||||
); |
||||
}; |
||||
|
||||
export const bundleManagedTask = new Task<BundeManagedOptions>('Bundle managed plugins', bundleManagedPluginsRunner); |
@ -1,91 +0,0 @@ |
||||
import crypto from 'crypto'; |
||||
import fs from 'fs'; |
||||
import path from 'path'; |
||||
|
||||
import { ManifestInfo } from './types'; |
||||
|
||||
const MANIFEST_FILE = 'MANIFEST.txt'; |
||||
|
||||
async function* walk(dir: string, baseDir: string): AsyncGenerator<string, any, any> { |
||||
for await (const d of await (fs.promises as any).opendir(dir)) { |
||||
const entry = path.posix.join(dir, d.name); |
||||
if (d.isDirectory()) { |
||||
yield* await walk(entry, baseDir); |
||||
} else if (d.isFile()) { |
||||
yield path.posix.relative(baseDir, entry); |
||||
} else if (d.isSymbolicLink()) { |
||||
const realPath = await (fs.promises as any).realpath(entry); |
||||
if (!realPath.startsWith(baseDir)) { |
||||
throw new Error( |
||||
`symbolic link ${path.posix.relative( |
||||
baseDir, |
||||
entry |
||||
)} targets a file outside of the base directory: ${baseDir}` |
||||
); |
||||
} |
||||
// if resolved symlink target is a file include it in the manifest
|
||||
const stats = await (fs.promises as any).stat(realPath); |
||||
if (stats.isFile()) { |
||||
yield path.posix.relative(baseDir, entry); |
||||
} |
||||
} |
||||
} |
||||
} |
||||
|
||||
export async function buildManifest(dir: string): Promise<ManifestInfo> { |
||||
const pluginJson = JSON.parse(fs.readFileSync(path.join(dir, 'plugin.json'), { encoding: 'utf8' })); |
||||
|
||||
const manifest = { |
||||
plugin: pluginJson.id, |
||||
version: pluginJson.info.version, |
||||
files: {}, |
||||
} as ManifestInfo; |
||||
|
||||
for await (const p of await walk(dir, dir)) { |
||||
if (p === MANIFEST_FILE) { |
||||
continue; |
||||
} |
||||
|
||||
manifest.files[p] = crypto |
||||
.createHash('sha256') |
||||
.update(fs.readFileSync(path.join(dir, p))) |
||||
.digest('hex'); |
||||
} |
||||
|
||||
return manifest; |
||||
} |
||||
|
||||
export async function signManifest(manifest: ManifestInfo): Promise<string> { |
||||
const GRAFANA_API_KEY = process.env.GRAFANA_API_KEY; |
||||
if (!GRAFANA_API_KEY) { |
||||
throw new Error('You must enter a GRAFANA_API_KEY to sign the plugin manifest'); |
||||
} |
||||
|
||||
const GRAFANA_COM_URL = process.env.GRAFANA_COM_URL || 'https://grafana.com/api'; |
||||
const url = GRAFANA_COM_URL + '/plugins/ci/sign'; |
||||
|
||||
const axios = require('axios'); |
||||
|
||||
try { |
||||
const info = await axios.post(url, manifest, { |
||||
headers: { Authorization: 'Bearer ' + GRAFANA_API_KEY }, |
||||
}); |
||||
if (info.status !== 200) { |
||||
console.warn('Error: ', info); |
||||
throw new Error('Error signing manifest'); |
||||
} |
||||
|
||||
return info.data; |
||||
} catch (err: any) { |
||||
if (err.response?.data?.message) { |
||||
throw new Error('Error signing manifest: ' + err.response.data.message); |
||||
} |
||||
|
||||
throw new Error('Error signing manifest: ' + err.message); |
||||
} |
||||
} |
||||
|
||||
export async function saveManifest(dir: string, signedManifest: string): Promise<boolean> { |
||||
fs.writeFileSync(path.join(dir, MANIFEST_FILE), signedManifest); |
||||
return true; |
||||
} |
@ -1,104 +0,0 @@ |
||||
import { PluginMeta, KeyValue } from '@grafana/data'; |
||||
|
||||
/** The zip packages produced for one plugin build. */
export interface PluginPackageDetails {
  /** Details for the plugin zip itself. */
  plugin: ZipFileInfo;
  /** Details for the optional documentation zip. */
  docs?: ZipFileInfo;
}

/** Everything reported to grafana.com for one CI plugin build. */
export interface PluginBuildReport {
  plugin: PluginMeta;
  packages: PluginPackageDetails;
  workflow: WorkflowInfo;
  coverage: CoverageInfo[];
  tests: TestResultsInfo[];
  git?: GitLogInfo;
  pullRequest?: number;
  artifactsBaseURL?: string;
  /** package name -> installed version, from node_modules/@grafana. */
  grafanaVersion?: KeyValue<string>;
}

/** Timing/status info for a single CI job. */
export interface JobInfo {
  job?: string;
  // epoch milliseconds
  startTime: number;
  // epoch milliseconds
  endTime: number;
  // endTime - startTime
  elapsed: number;
  status?: string;
  buildNumber?: number;
}

/** A whole workflow: its jobs plus CI metadata. */
export interface WorkflowInfo extends JobInfo {
  workflowId?: string;
  jobs: JobInfo[];
  user?: string;
  repo?: string;
}

/** One metric row of a coverage summary (lines/statements/branches/...). */
export interface CoverageDetails {
  total: number;
  covered: number;
  skipped: number;
  pct: number;
}

/** Coverage summary for one job. */
export interface CoverageInfo {
  job: string;
  summary: { [key: string]: CoverageDetails };
  report?: string; // path to report
}

/** Test outcome for one job. */
export interface TestResultsInfo {
  job: string;
  // NOTE(review): intentionally loose — the shape of this value is not defined here.
  grafana?: any;
  error?: string;
  passed: number;
  failed: number;
  screenshots: string[];
}

/** A count of files together with their cumulative size in bytes. */
export interface CountAndSize {
  count: number;
  bytes: number;
}

/** Per-file-extension size report, keyed by lowercased extension. */
export interface ExtensionSize {
  [key: string]: CountAndSize;
}

/** Metadata describing a produced zip archive. */
export interface ZipFileInfo {
  name: string;
  size: number;
  contents: ExtensionSize;
  sha1?: string;
  md5?: string;
}

/** git identity as parsed from `git log` pretty-format output. */
interface UserInfo {
  name: string;
  email: string;
  time?: number;
}

/** Details of the most recent commit, parsed from `git log -1`. */
export interface GitLogInfo {
  commit: string;
  tree: string;
  subject: string;
  body?: string;
  notes?: string;
  author: UserInfo;
  // NOTE(review): spelling matches the "commiter" JSON key emitted by readGitLog's
  // pretty-format string — do not "fix" to committer without changing both.
  commiter: UserInfo;
}

/** The manifest that gets signed for a plugin build. */
export interface ManifestInfo {
  // time: number; << filled in by the server
  // keyId: string; << filled in by the server
  // signedByOrg: string; << filled in by the server
  // signedByOrgName: string; << filled in by the server
  signatureType?: string; // filled in by the server if not specified
  rootUrls?: string[]; // for private signatures
  plugin: string;
  version: string;
  // relative file path -> sha256 hex digest
  files: Record<string, string>;
  toolkit?: {
    version: string;
  };
}
@ -1,132 +0,0 @@ |
||||
import execa from 'execa'; |
||||
import fs from 'fs'; |
||||
import path from 'path'; |
||||
|
||||
import { KeyValue } from '@grafana/data'; |
||||
|
||||
import { ExtensionSize, ZipFileInfo, GitLogInfo } from './types'; |
||||
|
||||
const md5File = require('md5-file'); |
||||
|
||||
export function getGrafanaVersions(): KeyValue<string> { |
||||
const dir = path.resolve(process.cwd(), 'node_modules', '@grafana'); |
||||
const versions: KeyValue = {}; |
||||
try { |
||||
fs.readdirSync(dir).forEach((file) => { |
||||
const json = require(path.resolve(dir, file, 'package.json')); |
||||
versions[file] = json.version; |
||||
}); |
||||
} catch (err) { |
||||
console.warn('Error reading toolkit versions', err); |
||||
} |
||||
return versions; |
||||
} |
||||
|
||||
export function getFileSizeReportInFolder(dir: string, info?: ExtensionSize): ExtensionSize { |
||||
const acc: ExtensionSize = info ? info : {}; |
||||
|
||||
const files = fs.readdirSync(dir); |
||||
if (files) { |
||||
files.forEach((file) => { |
||||
const newbase = path.join(dir, file); |
||||
const stat = fs.statSync(newbase); |
||||
if (stat.isDirectory()) { |
||||
getFileSizeReportInFolder(newbase, info); |
||||
} else { |
||||
let ext = '_none_'; |
||||
const idx = file.lastIndexOf('.'); |
||||
if (idx > 0) { |
||||
ext = file.substring(idx + 1).toLowerCase(); |
||||
} |
||||
const current = acc[ext]; |
||||
if (current) { |
||||
current.count += 1; |
||||
current.bytes += stat.size; |
||||
} else { |
||||
acc[ext] = { bytes: stat.size, count: 1 }; |
||||
} |
||||
} |
||||
}); |
||||
} |
||||
return acc; |
||||
} |
||||
|
||||
export async function getPackageDetails(zipFile: string, zipSrc: string, writeChecksum = true): Promise<ZipFileInfo> { |
||||
const zipStats = fs.statSync(zipFile); |
||||
if (zipStats.size < 100) { |
||||
throw new Error('Invalid zip file: ' + zipFile); |
||||
} |
||||
const info: ZipFileInfo = { |
||||
name: path.basename(zipFile), |
||||
size: zipStats.size, |
||||
contents: getFileSizeReportInFolder(zipSrc), |
||||
}; |
||||
try { |
||||
const exe = await execa('shasum', [zipFile]); |
||||
const idx = exe.stdout.indexOf(' '); |
||||
const sha1 = exe.stdout.substring(0, idx); |
||||
if (writeChecksum) { |
||||
fs.writeFile(zipFile + '.sha1', sha1, (err) => {}); |
||||
} |
||||
info.sha1 = sha1; |
||||
} catch { |
||||
console.warn('Unable to read SHA1 Checksum'); |
||||
} |
||||
try { |
||||
info.md5 = md5File.sync(zipFile); |
||||
} catch { |
||||
console.warn('Unable to read MD5 Checksum'); |
||||
} |
||||
return info; |
||||
} |
||||
|
||||
export function findImagesInFolder(dir: string, prefix = '', append?: string[]): string[] { |
||||
const imgs = append || []; |
||||
|
||||
const files = fs.readdirSync(dir); |
||||
if (files) { |
||||
files.forEach((file) => { |
||||
if (file.endsWith('.png')) { |
||||
imgs.push(file); |
||||
} |
||||
}); |
||||
} |
||||
|
||||
return imgs; |
||||
} |
||||
|
||||
export async function readGitLog(): Promise<GitLogInfo | undefined> { |
||||
try { |
||||
let exe = await execa('git', [ |
||||
'log', |
||||
'-1', // last line
|
||||
'--pretty=format:{%n "commit": "%H",%n "tree": "%T",%n "subject": "%s",%n "author": {%n "name": "%aN",%n "email": "%aE",%n "time":"%at" },%n "commiter": {%n "name": "%cN",%n "email": "%cE",%n "time":"%ct" }%n}', |
||||
]); |
||||
const info = JSON.parse(exe.stdout) as GitLogInfo; |
||||
|
||||
// Read the body
|
||||
exe = await execa('git', [ |
||||
'log', |
||||
'-1', // last line
|
||||
'--pretty=format:%b', // Just the body (with newlines!)
|
||||
]); |
||||
if (exe.stdout && exe.stdout.length) { |
||||
info.body = exe.stdout.trim(); |
||||
} |
||||
|
||||
// Read any commit notes
|
||||
exe = await execa('git', [ |
||||
'log', |
||||
'-1', // last line
|
||||
'--pretty=format:%N', // commit notes (with newlines!)
|
||||
]); |
||||
if (exe.stdout && exe.stdout.length) { |
||||
info.notes = exe.stdout.trim(); |
||||
} |
||||
|
||||
return info; |
||||
} catch (err) { |
||||
console.warn('Error REading Git log info', err); |
||||
} |
||||
return undefined; |
||||
} |
@ -1,100 +0,0 @@ |
||||
import fs from 'fs'; |
||||
import path from 'path'; |
||||
|
||||
import { getBuildNumber, getCiFolder } from './env'; |
||||
import { JobInfo, WorkflowInfo, CoverageInfo, TestResultsInfo } from './types'; |
||||
|
||||
export const agregateWorkflowInfo = (): WorkflowInfo => { |
||||
const now = Date.now(); |
||||
const workflow: WorkflowInfo = { |
||||
jobs: [], |
||||
startTime: now, |
||||
endTime: now, |
||||
workflowId: process.env.CIRCLE_WORKFLOW_ID, |
||||
repo: process.env.CIRCLE_PROJECT_REPONAME, |
||||
user: process.env.CIRCLE_PROJECT_USERNAME, |
||||
buildNumber: getBuildNumber(), |
||||
elapsed: 0, |
||||
}; |
||||
|
||||
const jobsFolder = path.resolve(getCiFolder(), 'jobs'); |
||||
if (fs.existsSync(jobsFolder)) { |
||||
const files = fs.readdirSync(jobsFolder); |
||||
if (files && files.length) { |
||||
files.forEach((file) => { |
||||
const p = path.resolve(jobsFolder, file, 'job.json'); |
||||
if (fs.existsSync(p)) { |
||||
const job = require(p) as JobInfo; |
||||
workflow.jobs.push(job); |
||||
if (job.startTime < workflow.startTime) { |
||||
workflow.startTime = job.startTime; |
||||
} |
||||
if (job.endTime > workflow.endTime) { |
||||
workflow.endTime = job.endTime; |
||||
} |
||||
} else { |
||||
console.log('Missing Job info: ', p); |
||||
} |
||||
}); |
||||
} else { |
||||
console.log('NO JOBS IN: ', jobsFolder); |
||||
} |
||||
} |
||||
|
||||
workflow.elapsed = workflow.endTime - workflow.startTime; |
||||
return workflow; |
||||
}; |
||||
|
||||
export const agregateCoverageInfo = (): CoverageInfo[] => { |
||||
const coverage: CoverageInfo[] = []; |
||||
const ciDir = getCiFolder(); |
||||
const jobsFolder = path.resolve(ciDir, 'jobs'); |
||||
if (fs.existsSync(jobsFolder)) { |
||||
const files = fs.readdirSync(jobsFolder); |
||||
if (files && files.length) { |
||||
files.forEach((file) => { |
||||
const dir = path.resolve(jobsFolder, file, 'coverage'); |
||||
if (fs.existsSync(dir)) { |
||||
const s = path.resolve(dir, 'coverage-summary.json'); |
||||
const r = path.resolve(dir, 'lcov-report', 'index.html'); |
||||
if (fs.existsSync(s)) { |
||||
const raw = require(s); |
||||
const info: CoverageInfo = { |
||||
job: file, |
||||
summary: raw.total, |
||||
}; |
||||
if (fs.existsSync(r)) { |
||||
info.report = r.substring(ciDir.length); |
||||
} |
||||
coverage.push(info); |
||||
} |
||||
} |
||||
}); |
||||
} else { |
||||
console.log('NO JOBS IN: ', jobsFolder); |
||||
} |
||||
} |
||||
return coverage; |
||||
}; |
||||
|
||||
export const agregateTestInfo = (): TestResultsInfo[] => { |
||||
const tests: TestResultsInfo[] = []; |
||||
const ciDir = getCiFolder(); |
||||
const jobsFolder = path.resolve(ciDir, 'jobs'); |
||||
if (fs.existsSync(jobsFolder)) { |
||||
const files = fs.readdirSync(jobsFolder); |
||||
if (files && files.length) { |
||||
files.forEach((file) => { |
||||
if (file.startsWith('test')) { |
||||
const summary = path.resolve(jobsFolder, file, 'results.json'); |
||||
if (fs.existsSync(summary)) { |
||||
tests.push(require(summary) as TestResultsInfo); |
||||
} |
||||
} |
||||
}); |
||||
} else { |
||||
console.log('NO Jobs IN: ', jobsFolder); |
||||
} |
||||
} |
||||
return tests; |
||||
}; |
Loading…
Reference in new issue