mirror of https://github.com/grafana/grafana
Storage: k6 tests (#57496)
* object store k6
* update script
* refactor
* rename scripts
* fix paths
* fixes
* fix client - check connected state
* add teardown timeout
* rename to grpc object store client
* fail if health check fails
* abort rather than fail
* stale import
* create `run.sh`
* adjust for dummy server
* fix mkdir
* clean up dependencies
* remove name and version
* bring back name and version!
* remove clean webpack plugin
* remove copy plugin
* update yarn lock
* remove stale import
* update yarn lock
* move perf tests to devenv/docker/loadtest-ts
* add codeowners
parent 10fdfa8583
commit 616db7f68b
@@ -0,0 +1,10 @@
{
  "presets": [
    "@babel/env",
    "@babel/typescript"
  ],
  "plugins": [
    "@babel/proposal-class-properties",
    "@babel/proposal-object-rest-spread"
  ]
}
@@ -0,0 +1,3 @@
scripts/tmp
dist/
.yarn
@@ -0,0 +1,14 @@
# Grafana load tests written in TypeScript - EXPERIMENTAL

Runs load tests written in TypeScript and checks Grafana's performance using [k6](https://k6.io/).

This is **experimental** - please consider adding new tests to devenv/docker/loadtest while we are testing the TypeScript approach!



# How to run

```
yarn install
GRPC_TOKEN={REPLACE_WITH_SERVICE_ACCOUNT_ADMIN_TOKEN} ./run.sh test=object-store-test grpcAddress=127.0.0.1:10000 execution=local
```
@@ -0,0 +1,28 @@
{
  "private": true,
  "license": "Apache-2.0",
  "name": "@grafana/perf-tests",
  "version": "9.3.0-pre",
  "devDependencies": {
    "@babel/core": "7.19.0",
    "@babel/plugin-proposal-class-properties": "7.18.6",
    "@babel/plugin-proposal-object-rest-spread": "7.18.9",
    "@babel/preset-env": "7.19.0",
    "@babel/preset-typescript": "7.18.6",
    "@types/k6": "0.39.0",
    "@types/shortid": "0.0.29",
    "@types/webpack": "5.28.0",
    "babel-loader": "8.2.5",
    "shortid": "2.2.16",
    "ts-node": "10.9.1",
    "typescript": "4.8.2",
    "webpack": "5.74.0",
    "webpack-cli": "4.10.0",
    "webpack-glob-entries": "1.0.1"
  },
  "scripts": {
    "build": "webpack",
    "prepare-testdata": "yarn run prepare-testdata:object-store-test",
    "prepare-testdata:object-store-test": "ts-node scripts/prepareDashboardFileNames.ts ../../dev-dashboards ./scripts/tmp/filenames.json"
  }
}
@@ -0,0 +1,64 @@
#!/usr/bin/env bash

if ((BASH_VERSINFO[0] < 4)); then
  echo "Bash ver >= 4 is needed to run this script"
  echo "Please upgrade your bash - run 'brew install bash' if you use Homebrew on MacOS"
  exit 1;
fi

declare -A cfg=(
  [grpcToken]=$GRPC_TOKEN
  [grpcAddress]="127.0.0.1:10000"
  [execution]="local"
  [test]="object-store-test"
  [k6CloudToken]=$K6_CLOUD_TOKEN
)

for ARGUMENT in "$@"
do
  KEY=$(echo $ARGUMENT | cut -f1 -d=)

  KEY_LENGTH=${#KEY}
  VALUE="${ARGUMENT:$KEY_LENGTH+1}"
  cfg["$KEY"]="$VALUE"
done

function usage() {
  echo "$0 grpcAddress= grpcToken= execution= k6CloudToken= test=
  - 'grpcAddress' is the address of Grafana gRPC server. 127.0.0.1:10000 is the default.
  - 'grpcToken' is the service account admin token used for Grafana gRPC server authentication.
  - 'execution' is the test execution mode; one of 'local', 'cloud-output', 'cloud'. 'local' is the default.
  - 'k6CloudToken' is the k6 cloud token required for 'cloud-output' and 'cloud' execution modes.
  - 'test' is the filepath of the test to execute relative to ./src, without the extension. example 'object-store-test'"
  exit 0
}

if [ "${cfg[grpcToken]}" == "" ]; then
  usage
fi


if [ "${cfg[execution]}" == "cloud" ]; then
  echo "cloud execution mode is not yet implemented"
  exit 0
elif [ "${cfg[execution]}" == "cloud-output" ]; then
  if [ "${cfg[k6CloudToken]}" == "" ]; then
    usage
  fi
elif [ "${cfg[execution]}" != "local" ]; then
  usage
fi

yarn run build
yarn run prepare-testdata

TEST_PATH="./dist/${cfg[test]}.js"
echo "$(date '+%Y-%m-%d %H:%M:%S'): Executing test ${TEST_PATH} in ${cfg[execution]} mode"

if [ "${cfg[execution]}" == "cloud-output" ]; then
  GRPC_TOKEN="${cfg[grpcToken]}" GRPC_ADDRESS="${cfg[grpcAddress]}" K6_CLOUD_TOKEN="${cfg[k6CloudToken]}" k6 run --out cloud "$TEST_PATH"
elif [ "${cfg[execution]}" == "local" ]; then
  GRPC_TOKEN="${cfg[grpcToken]}" GRPC_ADDRESS="${cfg[grpcAddress]}" k6 run "$TEST_PATH"
fi

@@ -0,0 +1,24 @@
import { readdirSync, writeFileSync, mkdirSync } from 'fs';
import { dirname, resolve } from 'path';

const args = process.argv.slice(2);

if (args.length !== 2) {
  throw new Error('expected dev dashboards dir and the output file path');
}

const devDashboardsDir = args[0];
const outputFilePath = args[1];

const getFiles = (dirPath: string, ext?: string): string[] =>
  readdirSync(dirPath, { withFileTypes: true })
    .flatMap((dirEntry) => {
      const res = resolve(dirPath, dirEntry.name);
      return dirEntry.isDirectory() ? getFiles(res) : res;
    })
    .filter((path) => (ext?.length ? path.endsWith(ext) : true));

const files = getFiles(devDashboardsDir, '.json');

mkdirSync(dirname(outputFilePath), { recursive: true });
writeFileSync(outputFilePath, JSON.stringify(files, null, 2));
@@ -0,0 +1,116 @@
const testDash = {
  annotations: { list: [] },
  editable: true,
  fiscalYearStartMonth: 0,
  graphTooltip: 0,
  id: 100,
  links: [],
  liveNow: false,
  panels: [
    {
      datasource: {
        type: 'testdata',
        uid: 'testdata',
      },
      fieldConfig: {
        defaults: {
          color: {
            mode: 'thresholds',
          },
          custom: {
            align: 'auto',
            displayMode: 'auto',
            inspect: false,
          },
          mappings: [],
          thresholds: {
            mode: 'absolute',
            steps: [
              {
                color: 'green',
                value: null,
              },
              {
                color: 'red',
                value: 80,
              },
            ],
          },
        },
        overrides: [],
      },
      gridPos: {
        h: 9,
        w: 12,
        x: 0,
        y: 0,
      },
      id: 2,
      options: {
        footer: {
          fields: '',
          reducer: ['sum'],
          show: false,
        },
        showHeader: true,
      },
      pluginVersion: '9.3.0-pre',
      targets: [
        {
          csvContent: '',
          datasource: {
            type: 'testdata',
            uid: 'PD8C576611E62080A',
          },
          refId: 'A',
          scenarioId: 'csv_content',
        },
      ],
      title: 'Panel Title',
      type: 'table',
    },
  ],
  schemaVersion: 37,
  style: 'dark',
  tags: [],
  templating: {
    list: [],
  },
  time: {
    from: 'now-6h',
    to: 'now',
  },
  timepicker: {},
  timezone: '',
  title: 'New dashboard',
  uid: '5v6e5VH4z',
  version: 1,
  weekStart: '',
} as const;

const getCsvContent = (lengthInKb: number): string => {
  const lines: string[] = ['id,name'];
  for (let i = 0; i < lengthInKb; i++) {
    const prefix = `${i},`;
    lines.push(prefix + 'a'.repeat(1024 - prefix.length));
  }
  return lines.join('\n');
};

export const prepareDashboard = (lengthInKb: number): Record<string, unknown> => {
  const firstPanel = testDash.panels[0];
  return {
    ...testDash,
    panels: [
      {
        ...firstPanel,
        targets: [
          {
            ...firstPanel.targets[0],
            csvContent: getCsvContent(lengthInKb),
          },
        ],
      },
    ],
  };
};
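Note on sizing: `getCsvContent` pads every CSV row to roughly 1 KB, so `prepareDashboard(n)` yields a dashboard whose table panel carries about `n` KB of `csvContent`. A minimal sketch of how that could be checked locally (illustrative only, not part of the test suite):

```ts
import { prepareDashboard } from './get-large-dashboard';

// Each CSV row is padded to ~1 KB, so the argument approximates the payload size in KB.
const dashboard1mb = prepareDashboard(1000);

// Rough size check; the serialized dashboard should come out slightly above 1000 KB.
const approxKb = Math.round(JSON.stringify(dashboard1mb).length / 1024);
console.log(`serialized dashboard is ~${approxKb} KB`);
```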
@@ -0,0 +1,214 @@
import { check } from 'k6';
import { b64encode } from 'k6/encoding';
import grpc from 'k6/net/grpc';

import { Object } from './prepare-data';

enum GRPCMethods {
  ServerHealth = 'grpc.health.v1.Health/Check',
  ObjectWrite = 'object.ObjectStore/Write',
  ObjectDelete = 'object.ObjectStore/Delete',
  ObjectRead = 'object.ObjectStore/Read',
}

export class GRPCObjectStoreClient {
  private connected = false;
  constructor(private client: grpc.Client, private grpcAddress: string, private grpcToken: string) {}

  connect = () => {
    if (!this.connected) {
      this.client.connect(this.grpcAddress, { plaintext: true, reflect: true });
      this.connected = true;
    }
  };

  grpcRequestParams = () => {
    return {
      metadata: {
        authorization: `Bearer ${this.grpcToken}`,
      },
    };
  };

  healthCheck = (): boolean => {
    this.connect();
    const response = this.client.invoke(GRPCMethods.ServerHealth, {});

    return check(response, {
      'server is healthy': (r) => {
        const statusOK = r && r.status === grpc.StatusOK;
        if (!statusOK) {
          return false;
        }

        const body = r.message;
        // @ts-ignore
        return 'status' in body && body.status === 'SERVING';
      },
    });
  };

  deleteObject = (uid: string, kind: string, _?: {}) => {
    this.connect();

    const response = this.client.invoke(
      GRPCMethods.ObjectDelete,
      {
        kind: kind,
        UID: uid,
      },
      this.grpcRequestParams()
    );

    check(response, {
      'object was deleted': (r) => {
        const statusOK = r && r.status === grpc.StatusOK;
        if (!statusOK) {
          return false;
        }

        if (!isDeleteObjectResponse(r.message)) {
          console.log(
            JSON.stringify({
              type: 'invalid_delete_response',
              uid: uid,
              kind: kind,
              resp: r,
            })
          );
          return false;
        }

        return true;
      },
    });
  };

  readObject = (uid: string, kind: string, _?: {}) => {
    this.connect();

    const response = this.client.invoke(
      GRPCMethods.ObjectRead,
      {
        kind: kind,
        UID: uid,
        with_body: true,
        with_summary: true,
      },
      this.grpcRequestParams()
    );

    check(response, {
      'object exists': (r) => {
        const statusOK = r && r.status === grpc.StatusOK;
        if (!statusOK) {
          return false;
        }

        const respBody = r.message;
        if (!isReadObjectResponse(respBody)) {
          console.log(
            JSON.stringify({
              type: 'invalid_read_response',
              uid: uid,
              kind: kind,
              resp: r,
            })
          );
          return false;
        }

        return typeof respBody.object.body === 'string';
      },
    });
  };

  writeObject = (object: Object, opts?: { randomizeData?: boolean; checkCreatedOrUpdated?: boolean }) => {
    this.connect();

    const data = opts?.randomizeData
      ? {
          ...object.data,
          __random: `${Date.now() - Math.random()}`,
        }
      : object.data;

    const response = this.client.invoke(
      GRPCMethods.ObjectWrite,
      {
        body: b64encode(JSON.stringify(data)),
        comment: '',
        kind: object.kind,
        UID: object.uid,
      },
      this.grpcRequestParams()
    );

    const checkName = opts?.checkCreatedOrUpdated ? 'object was created or updated' : 'object was created';
    check(response, {
      [checkName]: (r) => {
        const statusOK = r && r.status === grpc.StatusOK;
        if (!statusOK) {
          return false;
        }

        const respBody = r.message;
        if (!isWriteObjectResponse(respBody)) {
          console.log(
            JSON.stringify({
              type: 'invalid_write_response',
              uid: object.uid,
              kind: object.kind,
              resp: r,
            })
          );
          return false;
        }

        return opts?.checkCreatedOrUpdated
          ? respBody.status === WriteObjectResponseStatus.UPDATED ||
              respBody.status === WriteObjectResponseStatus.CREATED
          : respBody.status === WriteObjectResponseStatus.CREATED;
      },
    });
  };
}

type DeleteObjectResponse = {
  OK: boolean;
};

const isDeleteObjectResponse = (resp: object): resp is DeleteObjectResponse => {
  return resp.hasOwnProperty('OK');
};

enum WriteObjectResponseStatus {
  CREATED = 'CREATED',
  UPDATED = 'UPDATED',
}

type WriteObjectResponse = {
  status: WriteObjectResponseStatus;
};

const isWriteObjectResponse = (resp: object): resp is WriteObjectResponse => {
  return resp.hasOwnProperty('status');
};

type ReadObjectResponse = {
  object: {
    UID: string;
    kind: string;
    body: string;
  };
};

const isReadObjectResponse = (resp: object): resp is ReadObjectResponse => {
  if (!resp.hasOwnProperty('object')) {
    return false;
  }

  // @ts-ignore
  const object = resp.object;
  return Boolean(object && typeof object === 'object' && object.hasOwnProperty('body'));
};
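For readers skimming the diff, a minimal sketch of how this client is meant to be driven from a k6 script (the uid below is a hypothetical placeholder; `object-store-test.ts` further down is the real consumer):

```ts
import grpc from 'k6/net/grpc';

import { GRPCObjectStoreClient } from './object-store-client';

// GRPC_ADDRESS and GRPC_TOKEN are expected as env variables, as in object-store-test.ts.
const store = new GRPCObjectStoreClient(new grpc.Client(), __ENV.GRPC_ADDRESS, __ENV.GRPC_TOKEN);

export default function () {
  // connect() is invoked lazily by each method, so no explicit setup call is needed.
  store.writeObject({ uid: 'example-uid', kind: 'dashboard', data: { title: 'hello' } });
  store.readObject('example-uid', 'dashboard');
  store.deleteObject('example-uid', 'dashboard');
}
```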
@@ -0,0 +1,145 @@
import { SharedArray } from 'k6/data';
import execution from 'k6/execution';
import grpc from 'k6/net/grpc';

import { GRPCObjectStoreClient } from './object-store-client';
import { Data, prepareData } from './prepare-data';

const grpcToken = __ENV.GRPC_TOKEN;
const grpcAddress = __ENV.GRPC_ADDRESS;

if (typeof grpcToken !== 'string' || !grpcToken.length) {
  throw new Error('GRPC_TOKEN env variable is missing');
}

if (typeof grpcAddress !== 'string' || !grpcAddress.length) {
  throw new Error('GRPC_ADDRESS env variable is missing');
}

const client = new grpc.Client();
const objectStoreClient = new GRPCObjectStoreClient(client, grpcAddress, grpcToken);

const data: Data = new SharedArray('data', () => {
  return [prepareData(JSON.parse(open('../scripts/tmp/filenames.json')), 50)];
})[0];

const scenarioDuration = '2m';

export const options = {
  setupTimeout: '5m',
  teardownTimeout: '5m',
  noConnectionReuse: true,
  scenarios: {
    writer: {
      exec: 'writer',
      executor: 'constant-arrival-rate',
      rate: 1,
      timeUnit: '2s',
      duration: scenarioDuration,
      preAllocatedVUs: 1,
      maxVUs: 1,
    },
    reader: {
      exec: 'reader',
      executor: 'constant-arrival-rate',
      rate: 10,
      timeUnit: '2s',
      duration: scenarioDuration,
      preAllocatedVUs: 1,
      maxVUs: 10,
    },
    writer1mb: {
      exec: 'writer1mb',
      executor: 'constant-arrival-rate',
      rate: 1,
      timeUnit: '20s',
      duration: scenarioDuration,
      preAllocatedVUs: 1,
      maxVUs: 5,
    },
    reader1mb: {
      startTime: '2s',
      exec: 'reader1mb',
      executor: 'constant-arrival-rate',
      rate: 1,
      timeUnit: '1s',
      duration: scenarioDuration,
      preAllocatedVUs: 1,
      maxVUs: 5,
    },
    writer4mb: {
      exec: 'writer4mb',
      executor: 'constant-arrival-rate',
      rate: 1,
      timeUnit: '30s',
      duration: scenarioDuration,
      preAllocatedVUs: 1,
      maxVUs: 5,
    },
    reader4mb: {
      startTime: '3s',
      exec: 'reader4mb',
      executor: 'constant-arrival-rate',
      rate: 1,
      timeUnit: '5s',
      duration: scenarioDuration,
      preAllocatedVUs: 1,
      maxVUs: 5,
    },
  },
  // thresholds: { http_req_duration: ['avg<100', 'p(95)<200'] },
};

export function setup() {
  if (!objectStoreClient.healthCheck()) {
    execution.test.abort('server should be healthy');
  }

  console.log('inserting base objects');
  for (let i = 0; i < data.base.length; i++) {
    if (i % 100 === 0) {
      console.log(`inserted ${i} / ${data.base.length}`);
    }
    objectStoreClient.writeObject(data.base[i], { randomizeData: false, checkCreatedOrUpdated: false });
  }
}

export function teardown() {
  const toDelete = [...data.base, ...data.toWrite, data.size1mb, data.size4mb, data.size100kb];

  console.log('deleting base objects');
  for (let i = 0; i < toDelete.length; i++) {
    if (i % 100 === 0) {
      console.log(`deleted ${i} / ${toDelete.length}`);
    }
    objectStoreClient.deleteObject(toDelete[i].uid, toDelete[i].kind);
  }
}

export function reader() {
  const item = data.base[execution.scenario.iterationInTest % data.base.length];
  objectStoreClient.readObject(item.uid, item.kind);
}

export function writer() {
  const item = data.toWrite[execution.scenario.iterationInTest % data.toWrite.length];
  objectStoreClient.writeObject(item, { randomizeData: true, checkCreatedOrUpdated: true });
}

export function writer1mb() {
  objectStoreClient.writeObject(data.size1mb, { randomizeData: true, checkCreatedOrUpdated: true });
}

export function reader1mb() {
  const item = data.size1mb;
  objectStoreClient.readObject(item.uid, item.kind);
}

export function writer4mb() {
  objectStoreClient.writeObject(data.size4mb, { randomizeData: true, checkCreatedOrUpdated: true });
}

export function reader4mb() {
  const item = data.size4mb;
  objectStoreClient.readObject(item.uid, item.kind);
}
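The commented-out `thresholds` line above references `http_req_duration`, which would not match this test's gRPC traffic. If pass/fail criteria are wanted, k6's built-in `grpc_req_duration` metric and the overall `checks` rate are the more likely candidates; a hedged sketch (the numbers are placeholders, not tuned targets):

```ts
// Illustrative only: this object could replace the commented-out `thresholds` line in `options`.
export const thresholds = {
  grpc_req_duration: ['avg<100', 'p(95)<200'], // built-in metric emitted for k6/net/grpc calls
  checks: ['rate>0.99'], // fail the run if more than 1% of checks fail
};
```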
@@ -0,0 +1,57 @@
import shortid from 'shortid';

import { prepareDashboard } from './get-large-dashboard';

export type Object = {
  data: Record<string, unknown>;
  kind: string;
  uid: string;
};

export type Data = {
  base: Object[]; // objects that are inserted in the test setup and removed only in the teardown
  toWrite: Object[]; // objects that are inserted by scenarios and removed after a short period of time
  size100kb: Object;
  size1mb: Object;
  size4mb: Object;
};

export const readAsObjects = (paths: string[], kind: string): Object[] => {
  return paths.map((p) => ({
    data: JSON.parse(open(p)),
    uid: shortid.generate(),
    kind,
  }));
};

export const getBase = (uniqueObjects: Object[], no: number): Object[] => {
  const base = new Array<Object>(no);
  for (let i = 0; i < no; i++) {
    const obj = uniqueObjects[Math.floor(i % uniqueObjects.length)];
    base[i] = {
      ...obj,
      uid: `${obj.uid}-${Math.floor(i / uniqueObjects.length)}`,
    };
  }

  return base;
};

const prepareObject = (lengthInKb: number): Object => {
  return {
    data: prepareDashboard(lengthInKb),
    kind: 'dashboard',
    uid: shortid(),
  };
};

export const prepareData = (dashboardFilePaths: string[], baseNumber: number): Data => {
  const objects = readAsObjects(dashboardFilePaths, 'dashboard');
  return {
    base: getBase(objects, baseNumber),
    toWrite: objects,
    size100kb: prepareObject(100),
    size1mb: prepareObject(1000),
    size4mb: prepareObject(4000),
  };
};
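`getBase` cycles through the unique dashboards and appends the repetition index to each uid, so `baseNumber` can be larger than the number of dashboard files. A small illustrative example (the uids are hypothetical stand-ins for shortid values):

```ts
import { getBase, Object } from './prepare-data';

// Two unique objects expanded into a base set of five.
const unique: Object[] = [
  { uid: 'aaa', kind: 'dashboard', data: {} },
  { uid: 'bbb', kind: 'dashboard', data: {} },
];

const base = getBase(unique, 5);
// Resulting uids: 'aaa-0', 'bbb-0', 'aaa-1', 'bbb-1', 'aaa-2'
console.log(base.map((o) => o.uid));
```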
@@ -0,0 +1,26 @@
{
  "compilerOptions": {
    "target": "es5",
    "moduleResolution": "node",
    "module": "commonjs",
    "noEmit": true,
    "allowJs": true,
    "removeComments": false,

    "strict": true,
    "noImplicitAny": true,
    "noImplicitThis": true,

    "noUnusedLocals": true,
    "noUnusedParameters": true,
    "noImplicitReturns": true,
    "noFallthroughCasesInSwitch": true,

    "allowSyntheticDefaultImports": true,
    "esModuleInterop": true,
    "experimentalDecorators": true,
    "emitDecoratorMetadata": true,

    "skipLibCheck": true
  }
}
@@ -0,0 +1,37 @@
const path = require('path');
const GlobEntries = require('webpack-glob-entries');

module.exports = {
  mode: 'production',
  entry: GlobEntries('./src/*test*.ts'), // Generates multiple entry for each test
  output: {
    path: path.join(__dirname, 'dist'),
    libraryTarget: 'commonjs',
    filename: '[name].js',
    clean: true,
  },
  resolve: {
    extensions: ['.ts', '.js'],
  },
  module: {
    rules: [
      {
        test: /\.ts$/,
        use: 'babel-loader',
        exclude: /node_modules/,
      },
    ],
  },
  target: 'web',
  externals: /^(k6|https?\:\/\/)(\/.*)?/,
  // Generate map files for compiled scripts
  devtool: 'source-map',
  stats: {
    colors: true,
  },
  plugins: [],
  optimization: {
    // Don't minimize, as it's not used in the browser
    minimize: false,
  },
};
File diff suppressed because it is too large