Build: Drone starlark file cleanup (#59919)

* format drone starlark files with black

* clean up unused params

* more simplification

* more cleanup

* more cleanup
pull/59928/head
Dan Cech 2 years ago committed by GitHub
parent d0eeff2fa0
commit 440d8a3d88
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
  1. 46
      .drone.star
  2. 3
      Makefile
  3. 78
      scripts/drone/events/cron.star
  4. 56
      scripts/drone/events/main.star
  5. 76
      scripts/drone/events/pr.star
  6. 721
      scripts/drone/events/release.star
  7. 189
      scripts/drone/pipelines/build.star
  8. 14
      scripts/drone/pipelines/docs.star
  9. 31
      scripts/drone/pipelines/integration_tests.star
  10. 18
      scripts/drone/pipelines/lint_backend.star
  11. 15
      scripts/drone/pipelines/lint_frontend.star
  12. 53
      scripts/drone/pipelines/publish_images.star
  13. 20
      scripts/drone/pipelines/shellcheck.star
  14. 71
      scripts/drone/pipelines/test_backend.star
  15. 58
      scripts/drone/pipelines/test_frontend.star
  16. 28
      scripts/drone/pipelines/trigger_downstream.star
  17. 8
      scripts/drone/pipelines/verify_drone.star
  18. 133
      scripts/drone/pipelines/windows.star
  19. 57
      scripts/drone/services/services.star
  20. 616
      scripts/drone/steps/lib.star
  21. 61
      scripts/drone/utils/utils.star
  22. 73
      scripts/drone/vault.star
  23. 9
      scripts/drone/version.star

@ -7,17 +7,45 @@
load('scripts/drone/events/pr.star', 'pr_pipelines')
load('scripts/drone/events/main.star', 'main_pipelines')
load('scripts/drone/pipelines/docs.star', 'docs_pipelines')
load('scripts/drone/events/release.star', 'oss_pipelines', 'enterprise_pipelines', 'enterprise2_pipelines', 'publish_artifacts_pipelines', 'publish_npm_pipelines', 'publish_packages_pipeline', 'artifacts_page_pipeline')
load('scripts/drone/pipelines/publish_images.star', 'publish_image_pipelines_public', 'publish_image_pipelines_security')
load(
'scripts/drone/events/release.star',
'oss_pipelines',
'enterprise_pipelines',
'enterprise2_pipelines',
'publish_artifacts_pipelines',
'publish_npm_pipelines',
'publish_packages_pipeline',
'artifacts_page_pipeline',
)
load(
'scripts/drone/pipelines/publish_images.star',
'publish_image_pipelines_public',
'publish_image_pipelines_security',
)
load('scripts/drone/version.star', 'version_branch_pipelines')
load('scripts/drone/events/cron.star', 'cronjobs')
load('scripts/drone/vault.star', 'secrets')
def main(ctx):
edition = 'oss'
return pr_pipelines(edition=edition) + main_pipelines(edition=edition) + oss_pipelines() + enterprise_pipelines() + enterprise2_pipelines() + \
enterprise2_pipelines(prefix='custom-', trigger = {'event': ['custom']},) + \
publish_image_pipelines_public() + publish_image_pipelines_security() + \
publish_artifacts_pipelines('security') + publish_artifacts_pipelines('public') + \
publish_npm_pipelines('public') + publish_packages_pipeline() + artifacts_page_pipeline() + \
version_branch_pipelines() + cronjobs(edition=edition) + secrets()
return (
pr_pipelines()
+ main_pipelines()
+ oss_pipelines()
+ enterprise_pipelines()
+ enterprise2_pipelines()
+ enterprise2_pipelines(
prefix='custom-',
trigger={'event': ['custom']},
)
+ publish_image_pipelines_public()
+ publish_image_pipelines_security()
+ publish_artifacts_pipelines('security')
+ publish_artifacts_pipelines('public')
+ publish_npm_pipelines()
+ publish_packages_pipeline()
+ artifacts_page_pipeline()
+ version_branch_pipelines()
+ cronjobs()
+ secrets()
)

@ -233,5 +233,8 @@ drone: $(DRONE)
$(DRONE) lint .drone.yml --trusted
$(DRONE) --server https://drone.grafana.net sign --save grafana/grafana
format-drone:
black --include '\.star$$' -S scripts/drone/ .drone.star
help: ## Display this help.
@awk 'BEGIN {FS = ":.*##"; printf "\nUsage:\n make \033[36m<target>\033[0m\n"} /^[a-zA-Z_-]+:.*?##/ { printf " \033[36m%-15s\033[0m %s\n", $$1, $$2 } /^##@/ { printf "\n\033[1m%s\033[0m\n", substr($$0, 5) } ' $(MAKEFILE_LIST)

@ -1,22 +1,23 @@
load('scripts/drone/vault.star', 'from_secret', 'pull_secret')
load('scripts/drone/steps/lib.star', 'publish_image', 'compile_build_cmd')
load('scripts/drone/vault.star', 'from_secret')
load(
'scripts/drone/steps/lib.star',
'publish_image',
'compile_build_cmd',
)
aquasec_trivy_image = 'aquasec/trivy:0.21.0'
def cronjobs(edition):
grafana_com_nightly_pipeline = cron_job_pipeline(
cronName='grafana-com-nightly',
name='grafana-com-nightly',
steps=[compile_build_cmd(),post_to_grafana_com_step()]
)
def cronjobs():
return [
scan_docker_image_pipeline(edition, 'latest'),
scan_docker_image_pipeline(edition, 'main'),
scan_docker_image_pipeline(edition, 'latest-ubuntu'),
scan_docker_image_pipeline(edition, 'main-ubuntu'),
grafana_com_nightly_pipeline,
scan_docker_image_pipeline('latest'),
scan_docker_image_pipeline('main'),
scan_docker_image_pipeline('latest-ubuntu'),
scan_docker_image_pipeline('main-ubuntu'),
grafana_com_nightly_pipeline(),
]
def cron_job_pipeline(cronName, name, steps):
return {
'kind': 'pipeline',
@ -36,13 +37,9 @@ def cron_job_pipeline(cronName, name, steps):
'steps': steps,
}
def scan_docker_image_pipeline(edition, tag):
if edition != 'oss':
edition='grafana-enterprise'
else:
edition='grafana'
dockerImage='grafana/{}:{}'.format(edition, tag)
def scan_docker_image_pipeline(tag):
dockerImage = 'grafana/{}:{}'.format('grafana', tag)
return cron_job_pipeline(
cronName='nightly',
@ -51,7 +48,9 @@ def scan_docker_image_pipeline(edition, tag):
scan_docker_image_unkown_low_medium_vulnerabilities_step(dockerImage),
scan_docker_image_high_critical_vulnerabilities_step(dockerImage),
slack_job_failed_step('grafana-backend-ops', dockerImage),
])
],
)
def scan_docker_image_unkown_low_medium_vulnerabilities_step(dockerImage):
return {
@ -62,6 +61,7 @@ def scan_docker_image_unkown_low_medium_vulnerabilities_step(dockerImage):
],
}
def scan_docker_image_high_critical_vulnerabilities_step(dockerImage):
return {
'name': 'scan-high-critical-vulnerabilities',
@ -71,6 +71,7 @@ def scan_docker_image_high_critical_vulnerabilities_step(dockerImage):
],
}
def slack_job_failed_step(channel, image):
return {
'name': 'slack-notify-failure',
@ -78,22 +79,33 @@ def slack_job_failed_step(channel, image):
'settings': {
'webhook': from_secret('slack_webhook_backend'),
'channel': channel,
'template': 'Nightly docker image scan job for ' + image + ' failed: {{build.link}}',
'template': 'Nightly docker image scan job for '
+ image
+ ' failed: {{build.link}}',
},
'when': {
'status': 'failure'
}
'when': {'status': 'failure'},
}
def post_to_grafana_com_step():
return {
'name': 'post-to-grafana-com',
'image': publish_image,
'environment': {
'GRAFANA_COM_API_KEY': from_secret('grafana_api_key'),
'GCP_KEY': from_secret('gcp_key'),
},
'depends_on': ['compile-build-cmd'],
'commands': ['./bin/build publish grafana-com --edition oss'],
}
'name': 'post-to-grafana-com',
'image': publish_image,
'environment': {
'GRAFANA_COM_API_KEY': from_secret('grafana_api_key'),
'GCP_KEY': from_secret('gcp_key'),
},
'depends_on': ['compile-build-cmd'],
'commands': ['./bin/build publish grafana-com --edition oss'],
}
def grafana_com_nightly_pipeline():
return cron_job_pipeline(
cronName='grafana-com-nightly',
name='grafana-com-nightly',
steps=[
compile_build_cmd(),
post_to_grafana_com_step(),
],
)

@ -57,7 +57,9 @@ load('scripts/drone/vault.star', 'from_secret')
ver_mode = 'main'
trigger = {
'event': ['push',],
'event': [
'push',
],
'branch': 'main',
'paths': {
'exclude': [
@ -68,9 +70,12 @@ trigger = {
},
}
def main_pipelines(edition):
def main_pipelines():
drone_change_trigger = {
'event': ['push',],
'event': [
'push',
],
'branch': 'main',
'repo': [
'grafana/grafana',
@ -86,23 +91,36 @@ def main_pipelines(edition):
}
pipelines = [
docs_pipelines(edition, ver_mode, trigger_docs_main()),
test_frontend(trigger, ver_mode),
docs_pipelines(ver_mode, trigger_docs_main()),
test_frontend(trigger, ver_mode, committish='${DRONE_COMMIT}'),
lint_frontend_pipeline(trigger, ver_mode),
test_backend(trigger, ver_mode),
test_backend(trigger, ver_mode, committish='${DRONE_COMMIT}'),
lint_backend_pipeline(trigger, ver_mode),
build_e2e(trigger, ver_mode, edition),
integration_tests(trigger, ver_mode, edition),
windows(trigger, edition, ver_mode),
notify_pipeline(
name='notify-drone-changes', slack_channel='slack-webhooks-test', trigger=drone_change_trigger,
template=drone_change_template, secret='drone-changes-webhook',
),
enterprise_downstream_pipeline(edition, ver_mode),
notify_pipeline(
name='main-notify', slack_channel='grafana-ci-notifications', trigger=dict(trigger, status=['failure']),
depends_on=['main-test-frontend', 'main-test-backend', 'main-build-e2e-publish', 'main-integration-tests', 'main-windows'],
template=failure_template, secret='slack_webhook'
)]
build_e2e(trigger, ver_mode),
integration_tests(trigger, prefix=ver_mode),
windows(trigger, edition='oss', ver_mode=ver_mode),
notify_pipeline(
name='notify-drone-changes',
slack_channel='slack-webhooks-test',
trigger=drone_change_trigger,
template=drone_change_template,
secret='drone-changes-webhook',
),
enterprise_downstream_pipeline(),
notify_pipeline(
name='main-notify',
slack_channel='grafana-ci-notifications',
trigger=dict(trigger, status=['failure']),
depends_on=[
'main-test-frontend',
'main-test-backend',
'main-build-e2e-publish',
'main-integration-tests',
'main-windows',
],
template=failure_template,
secret='slack_webhook',
),
]
return pipelines

@ -64,16 +64,73 @@ trigger = {
}
def pr_pipelines(edition):
def pr_pipelines():
return [
verify_drone(get_pr_trigger(include_paths=['scripts/drone/**', '.drone.yml', '.drone.star']), ver_mode),
test_frontend(get_pr_trigger(exclude_paths=['pkg/**', 'packaging/**', 'go.sum', 'go.mod']), ver_mode),
lint_frontend_pipeline(get_pr_trigger(exclude_paths=['pkg/**', 'packaging/**', 'go.sum', 'go.mod']), ver_mode),
test_backend(get_pr_trigger(include_paths=['pkg/**', 'packaging/**', '.drone.yml', 'conf/**', 'go.sum', 'go.mod', 'public/app/plugins/**/plugin.json', 'devenv/**']), ver_mode),
lint_backend_pipeline(get_pr_trigger(include_paths=['pkg/**', 'packaging/**', 'conf/**', 'go.sum', 'go.mod', 'public/app/plugins/**/plugin.json', 'devenv/**']), ver_mode),
build_e2e(trigger, ver_mode, edition),
integration_tests(get_pr_trigger(include_paths=['pkg/**', 'packaging/**', '.drone.yml', 'conf/**', 'go.sum', 'go.mod', 'public/app/plugins/**/plugin.json']), ver_mode, edition),
docs_pipelines(edition, ver_mode, trigger_docs_pr()),
verify_drone(
get_pr_trigger(
include_paths=['scripts/drone/**', '.drone.yml', '.drone.star']
),
ver_mode,
),
test_frontend(
get_pr_trigger(
exclude_paths=['pkg/**', 'packaging/**', 'go.sum', 'go.mod']
),
ver_mode,
committish='${DRONE_COMMIT}',
),
lint_frontend_pipeline(
get_pr_trigger(
exclude_paths=['pkg/**', 'packaging/**', 'go.sum', 'go.mod']
),
ver_mode,
),
test_backend(
get_pr_trigger(
include_paths=[
'pkg/**',
'packaging/**',
'.drone.yml',
'conf/**',
'go.sum',
'go.mod',
'public/app/plugins/**/plugin.json',
'devenv/**',
]
),
ver_mode,
committish='${DRONE_COMMIT}',
),
lint_backend_pipeline(
get_pr_trigger(
include_paths=[
'pkg/**',
'packaging/**',
'conf/**',
'go.sum',
'go.mod',
'public/app/plugins/**/plugin.json',
'devenv/**',
]
),
ver_mode,
),
build_e2e(trigger, ver_mode),
integration_tests(
get_pr_trigger(
include_paths=[
'pkg/**',
'packaging/**',
'.drone.yml',
'conf/**',
'go.sum',
'go.mod',
'public/app/plugins/**/plugin.json',
]
),
prefix=ver_mode,
),
docs_pipelines(ver_mode, trigger_docs_pr()),
shellcheck_pipeline(),
]
@ -96,4 +153,3 @@ def get_pr_trigger(include_paths=None, exclude_paths=None):
'include': paths_in,
},
}

@ -1,49 +1,48 @@
load(
'scripts/drone/steps/lib.star',
'disable_tests',
'artifacts_page_step',
'benchmark_ldap_step',
'build_backend_step',
'build_docker_images_step',
'build_frontend_package_step',
'build_frontend_step',
'build_image',
'build_plugins_step',
'build_storybook_step',
'clone_enterprise_step',
'compile_build_cmd',
'copy_packages_for_docker_step',
'download_grabpl_step',
'yarn_install_step',
'wire_install_step',
'init_enterprise_step',
'lint_drone_step',
'build_image',
'e2e_tests_artifacts',
'e2e_tests_step',
'fetch_images_step',
'get_windows_steps',
'grafana_server_step',
'identify_runner_step',
'publish_image',
'init_enterprise_step',
'lint_backend_step',
'lint_drone_step',
'lint_frontend_step',
'test_backend_step',
'test_backend_integration_step',
'test_frontend_step',
'build_backend_step',
'build_frontend_step',
'build_frontend_package_step',
'build_plugins_step',
'memcached_integration_tests_step',
'mysql_integration_tests_step',
'package_step',
'grafana_server_step',
'e2e_tests_step',
'e2e_tests_artifacts',
'build_storybook_step',
'copy_packages_for_docker_step',
'build_docker_images_step',
'postgres_integration_tests_step',
'mysql_integration_tests_step',
'publish_grafanacom_step',
'publish_image',
'publish_images_step',
'publish_linux_packages_step',
'redis_integration_tests_step',
'memcached_integration_tests_step',
'get_windows_steps',
'benchmark_ldap_step',
'store_storybook_step',
'upload_packages_step',
'publish_grafanacom_step',
'test_backend_integration_step',
'test_backend_step',
'test_frontend_step',
'trigger_oss',
'upload_cdn_step',
'upload_packages_step',
'verify_gen_cue_step',
'verify_gen_jsonnet_step',
'publish_images_step',
'publish_linux_packages_step',
'trigger_oss',
'artifacts_page_step',
'fetch_images_step',
'compile_build_cmd',
'wire_install_step',
'yarn_install_step',
)
load(
@ -59,30 +58,38 @@ load(
'notify_pipeline',
'failure_template',
'drone_change_template',
'with_deps',
)
load(
'scripts/drone/pipelines/test_frontend.star',
'test_frontend',
'test_frontend_enterprise',
)
load(
'scripts/drone/pipelines/test_backend.star',
'test_backend',
'test_backend_enterprise',
)
load('scripts/drone/vault.star', 'from_secret', 'github_token', 'pull_secret', 'drone_token', 'prerelease_bucket')
load(
'scripts/drone/vault.star',
'from_secret',
'pull_secret',
'drone_token',
'prerelease_bucket',
)
ver_mode='release'
ver_mode = 'release'
release_trigger = {
'event': {
'exclude': [
'promote'
]
},
'ref': ['refs/tags/v*',],
'event': {'exclude': ['promote']},
'ref': [
'refs/tags/v*',
],
}
def store_npm_packages_step():
return {
'name': 'store-npm-packages',
@ -92,13 +99,12 @@ def store_npm_packages_step():
],
'environment': {
'GCP_KEY': from_secret('gcp_key'),
'PRERELEASE_BUCKET': from_secret(prerelease_bucket)
'PRERELEASE_BUCKET': from_secret(prerelease_bucket),
},
'commands': [
'./bin/grabpl artifacts npm store --tag ${DRONE_TAG}'
],
'commands': ['./bin/grabpl artifacts npm store --tag ${DRONE_TAG}'],
}
def retrieve_npm_packages_step():
return {
'name': 'retrieve-npm-packages',
@ -109,13 +115,12 @@ def retrieve_npm_packages_step():
'failure': 'ignore',
'environment': {
'GCP_KEY': from_secret('gcp_key'),
'PRERELEASE_BUCKET': from_secret(prerelease_bucket)
'PRERELEASE_BUCKET': from_secret(prerelease_bucket),
},
'commands': [
'./bin/grabpl artifacts npm retrieve --tag ${DRONE_TAG}'
],
'commands': ['./bin/grabpl artifacts npm retrieve --tag ${DRONE_TAG}'],
}
def release_npm_packages_step():
return {
'name': 'release-npm-packages',
@ -127,308 +132,393 @@ def release_npm_packages_step():
'environment': {
'NPM_TOKEN': from_secret('npm_token'),
},
'commands': [
'./bin/grabpl artifacts npm release --tag ${DRONE_TAG}'
],
'commands': ['./bin/grabpl artifacts npm release --tag ${DRONE_TAG}'],
}
def oss_pipelines(ver_mode=ver_mode, trigger=release_trigger):
if ver_mode == 'release':
committish = '${DRONE_TAG}'
elif ver_mode == 'release-branch':
committish = '${DRONE_BRANCH}'
else:
committish = '${DRONE_COMMIT}'
environment = {'EDITION': 'oss'}
edition = 'oss'
services = integration_test_services(edition=edition)
services = integration_test_services(edition='oss')
volumes = integration_test_services_volumes()
package_steps = []
publish_steps = []
should_publish = ver_mode == 'release'
should_upload = should_publish or ver_mode in ('release-branch',)
init_steps = [
identify_runner_step(),
download_grabpl_step(),
verify_gen_cue_step(edition),
verify_gen_cue_step(),
wire_install_step(),
yarn_install_step(),
compile_build_cmd(),
]
build_steps = [
build_backend_step(edition=edition, ver_mode=ver_mode),
build_frontend_step(edition=edition, ver_mode=ver_mode),
build_frontend_package_step(edition=edition, ver_mode=ver_mode),
build_plugins_step(edition=edition, ver_mode=ver_mode),
build_backend_step(edition='oss', ver_mode=ver_mode),
build_frontend_step(edition='oss', ver_mode=ver_mode),
build_frontend_package_step(edition='oss', ver_mode=ver_mode),
build_plugins_step(edition='oss', ver_mode=ver_mode),
package_step(edition='oss', ver_mode=ver_mode),
copy_packages_for_docker_step(),
build_docker_images_step(edition='oss', ver_mode=ver_mode, publish=True),
build_docker_images_step(
edition='oss', ver_mode=ver_mode, publish=True, ubuntu=True
),
grafana_server_step(edition='oss'),
e2e_tests_step('dashboards-suite', tries=3),
e2e_tests_step('smoke-tests-suite', tries=3),
e2e_tests_step('panels-suite', tries=3),
e2e_tests_step('various-suite', tries=3),
e2e_tests_artifacts(),
build_storybook_step(ver_mode=ver_mode),
]
publish_steps = []
if ver_mode in (
'release',
'release-branch',
):
publish_steps.extend(
[
upload_cdn_step(edition='oss', ver_mode=ver_mode, trigger=trigger_oss),
upload_packages_step(
edition='oss', ver_mode=ver_mode, trigger=trigger_oss
),
]
)
if ver_mode in ('release',):
publish_steps.extend(
[
store_storybook_step(ver_mode=ver_mode),
store_npm_packages_step(),
]
)
integration_test_steps = [
postgres_integration_tests_step(edition=edition, ver_mode=ver_mode),
mysql_integration_tests_step(edition=edition, ver_mode=ver_mode),
postgres_integration_tests_step(),
mysql_integration_tests_step(),
]
# Insert remaining steps
build_steps.extend([
package_step(edition=edition, ver_mode=ver_mode),
copy_packages_for_docker_step(),
build_docker_images_step(edition=edition, ver_mode=ver_mode, publish=True),
build_docker_images_step(edition=edition, ver_mode=ver_mode, ubuntu=True, publish=True),
grafana_server_step(edition=edition),
])
if not disable_tests:
build_steps.extend([
e2e_tests_step('dashboards-suite', edition=edition, tries=3),
e2e_tests_step('smoke-tests-suite', edition=edition, tries=3),
e2e_tests_step('panels-suite', edition=edition, tries=3),
e2e_tests_step('various-suite', edition=edition, tries=3),
e2e_tests_artifacts(edition=edition),
])
build_storybook = build_storybook_step(edition=edition, ver_mode=ver_mode)
if build_storybook:
build_steps.append(build_storybook)
if should_upload:
publish_steps.append(upload_cdn_step(edition=edition, ver_mode=ver_mode, trigger=trigger_oss))
publish_steps.append(upload_packages_step(edition=edition, ver_mode=ver_mode, trigger=trigger_oss))
if should_publish:
publish_step = store_storybook_step(edition=edition, ver_mode=ver_mode)
store_npm_step = store_npm_packages_step()
if publish_step:
publish_steps.append(publish_step)
if store_npm_step:
publish_steps.append(store_npm_step)
windows_package_steps = get_windows_steps(edition=edition, ver_mode=ver_mode)
windows_pipeline = pipeline(
name='{}-oss-windows'.format(ver_mode), edition=edition, trigger=trigger,
steps=[identify_runner_step('windows')] + windows_package_steps,
platform='windows', depends_on=[
'oss-build{}-publish-{}'.format(get_e2e_suffix(), ver_mode),
], environment=environment,
name='{}-oss-windows'.format(ver_mode),
edition='oss',
trigger=trigger,
steps=get_windows_steps(edition='oss', ver_mode=ver_mode),
platform='windows',
depends_on=[
# 'oss-build-e2e-publish-{}'.format(ver_mode),
'{}-oss-build-e2e-publish'.format(ver_mode),
'{}-oss-test-frontend'.format(ver_mode),
'{}-oss-test-backend'.format(ver_mode),
'{}-oss-integration-tests'.format(ver_mode),
],
environment=environment,
)
pipelines = [
pipeline(
name='{}-oss-build{}-publish'.format(ver_mode, get_e2e_suffix()), edition=edition, trigger=trigger, services=[],
steps=init_steps + build_steps + package_steps + publish_steps,
environment=environment, volumes=volumes,
name='{}-oss-build-e2e-publish'.format(ver_mode),
edition='oss',
trigger=trigger,
services=[],
steps=init_steps + build_steps + publish_steps,
environment=environment,
volumes=volumes,
),
test_frontend(trigger, ver_mode, committish=committish),
test_backend(trigger, ver_mode, committish=committish),
pipeline(
name='{}-oss-integration-tests'.format(ver_mode),
edition='oss',
trigger=trigger,
services=services,
steps=[
download_grabpl_step(),
identify_runner_step(),
verify_gen_cue_step(),
verify_gen_jsonnet_step(),
wire_install_step(),
]
+ integration_test_steps,
environment=environment,
volumes=volumes,
),
windows_pipeline,
]
if not disable_tests:
pipelines.extend([
test_frontend(trigger, ver_mode),
test_backend(trigger, ver_mode),
pipeline(
name='{}-oss-integration-tests'.format(ver_mode), edition=edition, trigger=trigger, services=services,
steps=[download_grabpl_step(), identify_runner_step(), verify_gen_cue_step(edition), verify_gen_jsonnet_step(edition), wire_install_step(), ] + integration_test_steps,
environment=environment, volumes=volumes,
)
])
deps = {
'depends_on': [
'{}-oss-build{}-publish'.format(ver_mode, get_e2e_suffix()),
'{}-oss-test-frontend'.format(ver_mode),
'{}-oss-test-backend'.format(ver_mode),
'{}-oss-integration-tests'.format(ver_mode)
]
}
windows_pipeline.update(deps)
pipelines.extend([windows_pipeline])
return pipelines
def enterprise_pipelines(ver_mode=ver_mode, trigger=release_trigger):
if ver_mode == 'release':
committish = '${DRONE_TAG}'
elif ver_mode == 'release-branch':
committish = '${DRONE_BRANCH}'
else:
committish = '${DRONE_COMMIT}'
environment = {'EDITION': 'enterprise'}
edition = 'enterprise'
services = integration_test_services(edition=edition)
services = integration_test_services(edition='enterprise')
volumes = integration_test_services_volumes()
package_steps = []
publish_steps = []
should_publish = ver_mode == 'release'
should_upload = should_publish or ver_mode in ('release-branch',)
include_enterprise = edition == 'enterprise'
edition2 = 'enterprise2'
init_steps = [
download_grabpl_step(),
identify_runner_step(),
clone_enterprise_step(ver_mode),
clone_enterprise_step(committish=committish),
init_enterprise_step(ver_mode),
compile_build_cmd(edition),
]
compile_build_cmd('enterprise'),
] + with_deps(
[
wire_install_step(),
yarn_install_step(),
verify_gen_cue_step(),
verify_gen_jsonnet_step(),
],
[
'init-enterprise',
],
)
build_steps = [
build_backend_step(edition=edition, ver_mode=ver_mode),
build_frontend_step(edition=edition, ver_mode=ver_mode),
build_frontend_package_step(edition=edition, ver_mode=ver_mode),
build_plugins_step(edition=edition, ver_mode=ver_mode),
]
integration_test_steps = [
postgres_integration_tests_step(edition=edition, ver_mode=ver_mode),
mysql_integration_tests_step(edition=edition, ver_mode=ver_mode),
build_backend_step(edition='enterprise', ver_mode=ver_mode),
build_frontend_step(edition='enterprise', ver_mode=ver_mode),
build_frontend_package_step(edition='enterprise', ver_mode=ver_mode),
build_plugins_step(edition='enterprise', ver_mode=ver_mode),
build_backend_step(
edition='enterprise2', ver_mode=ver_mode, variants=['linux-amd64']
),
package_step(
edition='enterprise',
ver_mode=ver_mode,
include_enterprise2=True,
),
copy_packages_for_docker_step(),
build_docker_images_step(edition='enterprise', ver_mode=ver_mode, publish=True),
build_docker_images_step(
edition='enterprise', ver_mode=ver_mode, publish=True, ubuntu=True
),
grafana_server_step(edition='enterprise'),
e2e_tests_step('dashboards-suite', tries=3),
e2e_tests_step('smoke-tests-suite', tries=3),
e2e_tests_step('panels-suite', tries=3),
e2e_tests_step('various-suite', tries=3),
e2e_tests_artifacts(),
]
if include_enterprise:
build_steps.extend([
build_backend_step(edition=edition2, ver_mode=ver_mode, variants=['linux-amd64']),
])
publish_steps = []
# Insert remaining steps
build_steps.extend([
package_step(edition=edition, ver_mode=ver_mode, include_enterprise2=include_enterprise),
copy_packages_for_docker_step(),
build_docker_images_step(edition=edition, ver_mode=ver_mode, publish=True),
build_docker_images_step(edition=edition, ver_mode=ver_mode, ubuntu=True, publish=True),
grafana_server_step(edition=edition),
])
if not disable_tests:
build_steps.extend([
e2e_tests_step('dashboards-suite', edition=edition, tries=3),
e2e_tests_step('smoke-tests-suite', edition=edition, tries=3),
e2e_tests_step('panels-suite', edition=edition, tries=3),
e2e_tests_step('various-suite', edition=edition, tries=3),
e2e_tests_artifacts(edition=edition),
])
build_storybook = build_storybook_step(edition=edition, ver_mode=ver_mode)
if build_storybook:
build_steps.append(build_storybook)
if should_upload:
publish_steps.extend([
upload_cdn_step(edition=edition, ver_mode=ver_mode, trigger=trigger_oss),
upload_packages_step(edition=edition, ver_mode=ver_mode, trigger=trigger_oss),
package_step(edition=edition2, ver_mode=ver_mode, include_enterprise2=include_enterprise, variants=['linux-amd64']),
upload_cdn_step(edition=edition2, ver_mode=ver_mode),
])
if should_publish:
publish_step = store_storybook_step(edition=edition, ver_mode=ver_mode)
if publish_step:
publish_steps.append(publish_step)
windows_package_steps = get_windows_steps(edition=edition, ver_mode=ver_mode)
if should_upload:
step = upload_packages_step(edition=edition2, ver_mode=ver_mode)
if step:
publish_steps.append(step)
deps_on_clone_enterprise_step = {
'depends_on': [
'init-enterprise',
]
}
if ver_mode in (
'release',
'release-branch',
):
upload_packages_enterprise = upload_packages_step(
edition='enterprise', ver_mode=ver_mode, trigger=trigger_oss
)
upload_packages_enterprise['depends_on'] = ['package']
upload_packages_enterprise2 = upload_packages_step(
edition='enterprise2', ver_mode=ver_mode
)
upload_packages_enterprise2['depends_on'] = ['package-enterprise2']
publish_steps.extend(
[
upload_cdn_step(
edition='enterprise', ver_mode=ver_mode, trigger=trigger_oss
),
upload_packages_enterprise,
package_step(
edition='enterprise2',
ver_mode=ver_mode,
include_enterprise2=True,
variants=['linux-amd64'],
),
upload_cdn_step(edition='enterprise2', ver_mode=ver_mode),
upload_packages_enterprise2,
]
)
for step in [wire_install_step(), yarn_install_step(edition), verify_gen_cue_step(edition), verify_gen_jsonnet_step(edition)]:
step.update(deps_on_clone_enterprise_step)
init_steps.extend([step])
integration_test_steps = [
postgres_integration_tests_step(),
mysql_integration_tests_step(),
]
windows_pipeline = pipeline(
name='{}-enterprise-windows'.format(ver_mode), edition=edition, trigger=trigger,
steps=[identify_runner_step('windows')] + windows_package_steps,
platform='windows', depends_on=[
'enterprise-build{}-publish-{}'.format(get_e2e_suffix(), ver_mode),
], environment=environment,
name='{}-enterprise-windows'.format(ver_mode),
edition='enterprise',
trigger=trigger,
steps=get_windows_steps(edition='enterprise', ver_mode=ver_mode),
platform='windows',
depends_on=[
# 'enterprise-build-e2e-publish-{}'.format(ver_mode),
'{}-enterprise-build-e2e-publish'.format(ver_mode),
'{}-enterprise-test-frontend'.format(ver_mode),
'{}-enterprise-test-backend'.format(ver_mode),
'{}-enterprise-integration-tests'.format(ver_mode),
],
environment=environment,
)
pipelines = [
pipeline(
name='{}-enterprise-build{}-publish'.format(ver_mode, get_e2e_suffix()), edition=edition, trigger=trigger, services=[],
steps=init_steps + build_steps + package_steps + publish_steps, environment=environment,
name='{}-enterprise-build-e2e-publish'.format(ver_mode),
edition='enterprise',
trigger=trigger,
services=[],
steps=init_steps + build_steps + publish_steps,
environment=environment,
volumes=volumes,
),
]
if not disable_tests:
pipelines.extend([
test_frontend(trigger, ver_mode, edition),
test_backend(trigger, ver_mode, edition),
pipeline(
name='{}-enterprise-integration-tests'.format(ver_mode), edition=edition, trigger=trigger, services=services,
steps=[download_grabpl_step(), identify_runner_step(), clone_enterprise_step(ver_mode), init_enterprise_step(ver_mode), verify_gen_cue_step(edition), verify_gen_jsonnet_step(edition), wire_install_step()] + integration_test_steps + [redis_integration_tests_step(), memcached_integration_tests_step()],
environment=environment, volumes=volumes,
),
])
deps = {
'depends_on': [
'{}-enterprise-build{}-publish'.format(ver_mode, get_e2e_suffix()),
'{}-enterprise-test-frontend'.format(ver_mode),
'{}-enterprise-test-backend'.format(ver_mode),
'{}-enterprise-integration-tests'.format(ver_mode)
test_frontend_enterprise(trigger, ver_mode, committish=committish),
test_backend_enterprise(trigger, ver_mode, committish=committish),
pipeline(
name='{}-enterprise-integration-tests'.format(ver_mode),
edition='enterprise',
trigger=trigger,
services=services,
steps=[
download_grabpl_step(),
identify_runner_step(),
clone_enterprise_step(committish=committish),
init_enterprise_step(ver_mode),
]
}
windows_pipeline.update(deps)
pipelines.extend([windows_pipeline])
+ with_deps(
[
verify_gen_cue_step(),
verify_gen_jsonnet_step(),
],
[
'init-enterprise',
],
)
+ [
wire_install_step(),
]
+ integration_test_steps
+ [
redis_integration_tests_step(),
memcached_integration_tests_step(),
],
environment=environment,
volumes=volumes,
),
windows_pipeline,
]
return pipelines
def enterprise2_pipelines(prefix='', ver_mode=ver_mode, trigger=release_trigger):
if ver_mode == 'release':
committish = '${DRONE_TAG}'
elif ver_mode == 'release-branch':
committish = '${DRONE_BRANCH}'
else:
committish = '${DRONE_COMMIT}'
environment = {
'EDITION': 'enterprise2',
}
edition = 'enterprise'
services = integration_test_services(edition=edition)
services = integration_test_services(edition='enterprise')
volumes = integration_test_services_volumes()
package_steps = []
publish_steps = []
should_publish = ver_mode == 'release'
should_upload = should_publish or ver_mode in ('release-branch',)
include_enterprise = edition == 'enterprise'
edition2 = 'enterprise2'
init_steps = [
download_grabpl_step(),
identify_runner_step(),
clone_enterprise_step(ver_mode),
clone_enterprise_step(committish=committish),
init_enterprise_step(ver_mode),
compile_build_cmd(edition),
]
compile_build_cmd('enterprise'),
] + with_deps(
[
wire_install_step(),
yarn_install_step(),
verify_gen_cue_step(),
],
[
'init-enterprise',
],
)
build_steps = [
build_frontend_step(edition=edition, ver_mode=ver_mode),
build_frontend_package_step(edition=edition, ver_mode=ver_mode),
build_plugins_step(edition=edition, ver_mode=ver_mode),
build_frontend_step(edition='enterprise', ver_mode=ver_mode),
build_frontend_package_step(edition='enterprise', ver_mode=ver_mode),
build_plugins_step(edition='enterprise', ver_mode=ver_mode),
build_backend_step(
edition='enterprise2', ver_mode=ver_mode, variants=['linux-amd64']
),
]
if include_enterprise:
build_steps.extend([
build_backend_step(edition=edition2, ver_mode=ver_mode, variants=['linux-amd64']),
])
fetch_images = fetch_images_step(edition2)
fetch_images.update({'depends_on': ['build-docker-images', 'build-docker-images-ubuntu']})
upload_cdn = upload_cdn_step(edition=edition2, ver_mode=ver_mode)
upload_cdn['environment'].update({'ENTERPRISE2_CDN_PATH': from_secret('enterprise2-cdn-path')})
build_steps.extend([
package_step(edition=edition2, ver_mode=ver_mode, include_enterprise2=include_enterprise, variants=['linux-amd64']),
upload_cdn,
copy_packages_for_docker_step(edition=edition2),
build_docker_images_step(edition=edition2, ver_mode=ver_mode, publish=True),
build_docker_images_step(edition=edition2, ver_mode=ver_mode, ubuntu=True, publish=True),
fetch_images,
publish_images_step(edition2, 'release', mode=edition2, docker_repo='${{DOCKER_ENTERPRISE2_REPO}}'),
])
if should_upload:
step = upload_packages_step(edition=edition2, ver_mode=ver_mode)
if step:
publish_steps.append(step)
deps_on_clone_enterprise_step = {
'depends_on': [
'init-enterprise',
fetch_images = fetch_images_step('enterprise2')
fetch_images.update(
{'depends_on': ['build-docker-images', 'build-docker-images-ubuntu']}
)
upload_cdn = upload_cdn_step(edition='enterprise2', ver_mode=ver_mode)
upload_cdn['environment'].update(
{'ENTERPRISE2_CDN_PATH': from_secret('enterprise2-cdn-path')}
)
build_steps.extend(
[
package_step(
edition='enterprise2',
ver_mode=ver_mode,
include_enterprise2=True,
variants=['linux-amd64'],
),
upload_cdn,
copy_packages_for_docker_step(edition='enterprise2'),
build_docker_images_step(
edition='enterprise2', ver_mode=ver_mode, publish=True
),
build_docker_images_step(
edition='enterprise2', ver_mode=ver_mode, publish=True, ubuntu=True
),
fetch_images,
publish_images_step(
'enterprise2',
'release',
mode='enterprise2',
docker_repo='${{DOCKER_ENTERPRISE2_REPO}}',
),
]
}
)
publish_steps = []
if ver_mode in (
'release',
'release-branch',
):
step = upload_packages_step(edition='enterprise2', ver_mode=ver_mode)
step['depends_on'] = ['package-enterprise2']
for step in [wire_install_step(), yarn_install_step(), verify_gen_cue_step(edition)]:
step.update(deps_on_clone_enterprise_step)
init_steps.extend([step])
publish_steps.append(step)
pipelines = [
pipeline(
name='{}{}-enterprise2-build{}-publish'.format(prefix, ver_mode, get_e2e_suffix()), edition=edition, trigger=trigger, services=[],
steps=init_steps + build_steps + package_steps + publish_steps,
volumes=volumes, environment=environment,
name='{}{}-enterprise2-build-e2e-publish'.format(prefix, ver_mode),
edition='enterprise',
trigger=trigger,
services=[],
steps=init_steps + build_steps + publish_steps,
volumes=volumes,
environment=environment,
),
]
return pipelines
def publish_artifacts_step(mode):
security = ''
if mode == 'security':
@ -440,10 +530,15 @@ def publish_artifacts_step(mode):
'GCP_KEY': from_secret('gcp_key'),
'PRERELEASE_BUCKET': from_secret('prerelease_bucket'),
},
'commands': ['./bin/grabpl artifacts publish {}--tag $${{DRONE_TAG}} --src-bucket $${{PRERELEASE_BUCKET}}'.format(security)],
'commands': [
'./bin/grabpl artifacts publish {}--tag $${{DRONE_TAG}} --src-bucket $${{PRERELEASE_BUCKET}}'.format(
security
)
],
'depends_on': ['grabpl'],
}
def publish_artifacts_pipelines(mode):
trigger = {
'event': ['promote'],
@ -454,9 +549,16 @@ def publish_artifacts_pipelines(mode):
publish_artifacts_step(mode),
]
return [pipeline(
name='publish-artifacts-{}'.format(mode), trigger=trigger, steps=steps, edition="all", environment = {'EDITION': 'all'}
)]
return [
pipeline(
name='publish-artifacts-{}'.format(mode),
trigger=trigger,
steps=steps,
edition="all",
environment={'EDITION': 'all'},
)
]
def publish_packages_pipeline():
trigger = {
@ -481,40 +583,63 @@ def publish_packages_pipeline():
deps = [
'publish-artifacts-public',
'publish-docker-oss-public',
'publish-docker-enterprise-public'
'publish-docker-enterprise-public',
]
return [
pipeline(
name='publish-packages-oss',
trigger=trigger,
steps=oss_steps,
edition="all",
depends_on=deps,
environment={'EDITION': 'oss'},
),
pipeline(
name='publish-packages-enterprise',
trigger=trigger,
steps=enterprise_steps,
edition="all",
depends_on=deps,
environment={'EDITION': 'enterprise'},
),
]
return [pipeline(
name='publish-packages-oss', trigger=trigger, steps=oss_steps, edition="all", depends_on=deps, environment = {'EDITION': 'oss'},
), pipeline(
name='publish-packages-enterprise', trigger=trigger, steps=enterprise_steps, edition="all", depends_on=deps, environment = {'EDITION': 'enterprise'}
)]
def publish_npm_pipelines(mode):
def publish_npm_pipelines():
trigger = {
'event': ['promote'],
'target': [mode],
'target': ['public'],
}
steps = [
download_grabpl_step(),
yarn_install_step(),
retrieve_npm_packages_step(),
release_npm_packages_step()
release_npm_packages_step(),
]
return [
pipeline(
name='publish-npm-packages-public',
trigger=trigger,
steps=steps,
edition="all",
environment={'EDITION': 'all'},
)
]
return [pipeline(
name='publish-npm-packages-{}'.format(mode), trigger=trigger, steps = steps, edition="all", environment = {'EDITION': 'all'},
)]
def artifacts_page_pipeline():
trigger = {
'event': ['promote'],
'target': 'security',
}
return [pipeline(name='publish-artifacts-page', trigger=trigger, steps = [download_grabpl_step(), artifacts_page_step()], edition="all", environment = {'EDITION': 'all'}
)]
def get_e2e_suffix():
if not disable_tests:
return '-e2e'
return ''
return [
pipeline(
name='publish-artifacts-page',
trigger=trigger,
steps=[download_grabpl_step(), artifacts_page_step()],
edition="all",
environment={'EDITION': 'all'},
)
]

@ -1,42 +1,41 @@
load(
'scripts/drone/steps/lib.star',
'download_grabpl_step',
'build_image',
'identify_runner_step',
'wire_install_step',
'yarn_install_step',
'benchmark_ldap_step',
'betterer_frontend_step',
'build_backend_step',
'build_frontend_step',
'build_docker_images_step',
'build_frontend_package_step',
'build_frontend_step',
'build_image',
'build_plugins_step',
'package_step',
'grafana_server_step',
'e2e_tests_step',
'e2e_tests_artifacts',
'build_storybook_step',
'cloud_plugins_e2e_tests_step',
'compile_build_cmd',
'copy_packages_for_docker_step',
'build_docker_images_step',
'publish_images_step',
'postgres_integration_tests_step',
'mysql_integration_tests_step',
'redis_integration_tests_step',
'memcached_integration_tests_step',
'get_windows_steps',
'benchmark_ldap_step',
'download_grabpl_step',
'e2e_tests_artifacts',
'e2e_tests_step',
'enterprise_downstream_step',
'frontend_metrics_step',
'store_storybook_step',
'grafana_server_step',
'identify_runner_step',
'memcached_integration_tests_step',
'mysql_integration_tests_step',
'package_step',
'postgres_integration_tests_step',
'publish_images_step',
'redis_integration_tests_step',
'release_canary_npm_packages_step',
'upload_packages_step',
'upload_cdn_step',
'verify_gen_cue_step',
'verify_gen_jsonnet_step',
'store_storybook_step',
'test_a11y_frontend_step',
'trigger_oss',
'betterer_frontend_step',
'trigger_test_release',
'compile_build_cmd',
'cloud_plugins_e2e_tests_step',
'upload_cdn_step',
'upload_packages_step',
'verify_gen_cue_step',
'verify_gen_jsonnet_step',
'wire_install_step',
'yarn_install_step',
)
load(
@ -44,68 +43,118 @@ load(
'pipeline',
)
def build_e2e(trigger, ver_mode, edition):
def build_e2e(trigger, ver_mode):
edition = 'oss'
environment = {'EDITION': edition}
variants = ['linux-amd64', 'linux-amd64-musl', 'darwin-amd64', 'windows-amd64',]
init_steps = [
identify_runner_step(),
download_grabpl_step(),
compile_build_cmd(),
verify_gen_cue_step(edition="oss"),
verify_gen_jsonnet_step(edition="oss"),
verify_gen_cue_step(),
verify_gen_jsonnet_step(),
wire_install_step(),
yarn_install_step(),
]
build_steps = []
variants = None
if ver_mode == 'pr':
build_steps.extend([trigger_test_release()])
build_steps.extend([enterprise_downstream_step(edition=edition, ver_mode=ver_mode)])
build_steps.extend([
build_backend_step(edition=edition, ver_mode=ver_mode),
build_frontend_step(edition=edition, ver_mode=ver_mode),
build_frontend_package_step(edition=edition, ver_mode=ver_mode),
build_plugins_step(edition=edition, ver_mode=ver_mode),
])
if ver_mode == 'main':
build_steps.extend([package_step(edition=edition, ver_mode=ver_mode)])
elif ver_mode == 'pr':
build_steps.extend([package_step(edition=edition, ver_mode=ver_mode, variants=variants)])
build_steps.extend(
[
trigger_test_release(),
enterprise_downstream_step(ver_mode=ver_mode),
]
)
build_steps.extend([
grafana_server_step(edition=edition),
e2e_tests_step('dashboards-suite', edition=edition),
e2e_tests_step('smoke-tests-suite', edition=edition),
e2e_tests_step('panels-suite', edition=edition),
e2e_tests_step('various-suite', edition=edition),
cloud_plugins_e2e_tests_step('cloud-plugins-suite', edition=edition, cloud='azure', trigger=trigger_oss),
e2e_tests_artifacts(edition=edition),
build_storybook_step(edition=edition, ver_mode=ver_mode),
copy_packages_for_docker_step(),
test_a11y_frontend_step(ver_mode=ver_mode, edition=edition),
])
if ver_mode == 'main':
build_steps.extend([
store_storybook_step(edition=edition, ver_mode=ver_mode, trigger=trigger_oss),
frontend_metrics_step(edition=edition, trigger=trigger_oss)
])
variants = [
'linux-amd64',
'linux-amd64-musl',
'darwin-amd64',
'windows-amd64',
]
build_steps.extend(
[
build_backend_step(edition=edition, ver_mode=ver_mode),
build_frontend_step(edition=edition, ver_mode=ver_mode),
build_frontend_package_step(edition=edition, ver_mode=ver_mode),
build_plugins_step(edition=edition, ver_mode=ver_mode),
package_step(edition=edition, ver_mode=ver_mode, variants=variants),
grafana_server_step(edition=edition),
e2e_tests_step('dashboards-suite'),
e2e_tests_step('smoke-tests-suite'),
e2e_tests_step('panels-suite'),
e2e_tests_step('various-suite'),
cloud_plugins_e2e_tests_step(
'cloud-plugins-suite',
cloud='azure',
trigger=trigger_oss,
),
e2e_tests_artifacts(),
build_storybook_step(ver_mode=ver_mode),
copy_packages_for_docker_step(),
test_a11y_frontend_step(ver_mode=ver_mode),
]
)
if ver_mode == 'main':
build_steps.extend([
build_docker_images_step(edition=edition, ver_mode=ver_mode, publish=False),
build_docker_images_step(edition=edition, ver_mode=ver_mode, ubuntu=True, publish=False),
publish_images_step(edition=edition, ver_mode=ver_mode, mode='', docker_repo='grafana', trigger=trigger_oss),
publish_images_step(edition=edition, ver_mode=ver_mode, mode='', docker_repo='grafana-oss', trigger=trigger_oss),
release_canary_npm_packages_step(edition, trigger=trigger_oss),
upload_packages_step(edition=edition, ver_mode=ver_mode, trigger=trigger_oss),
upload_cdn_step(edition=edition, ver_mode=ver_mode, trigger=trigger_oss)
])
build_steps.extend(
[
store_storybook_step(ver_mode=ver_mode, trigger=trigger_oss),
frontend_metrics_step(trigger=trigger_oss),
build_docker_images_step(
edition=edition, ver_mode=ver_mode, publish=False
),
build_docker_images_step(
edition=edition, ver_mode=ver_mode, publish=False, ubuntu=True
),
publish_images_step(
edition=edition,
ver_mode=ver_mode,
mode='',
docker_repo='grafana',
trigger=trigger_oss,
),
publish_images_step(
edition=edition,
ver_mode=ver_mode,
mode='',
docker_repo='grafana-oss',
trigger=trigger_oss,
),
release_canary_npm_packages_step(trigger=trigger_oss),
upload_packages_step(
edition=edition, ver_mode=ver_mode, trigger=trigger_oss
),
upload_cdn_step(
edition=edition, ver_mode=ver_mode, trigger=trigger_oss
),
]
)
elif ver_mode == 'pr':
build_steps.extend([build_docker_images_step(edition=edition, ver_mode=ver_mode, archs=['amd64', ])])
build_steps.extend(
[
build_docker_images_step(
edition=edition,
ver_mode=ver_mode,
archs=[
'amd64',
],
)
]
)
publish_suffix = ''
if ver_mode == 'main':
publish_suffix = '-publish'
return pipeline(
name='{}-build-e2e{}'.format(ver_mode, publish_suffix), edition="oss", trigger=trigger, services=[], steps=init_steps + build_steps, environment=environment,
name='{}-build-e2e{}'.format(ver_mode, publish_suffix),
edition="oss",
trigger=trigger,
services=[],
steps=init_steps + build_steps,
environment=environment,
)

@ -31,8 +31,9 @@ docs_paths = {
],
}
def docs_pipelines(edition, ver_mode, trigger):
environment = {'EDITION': edition}
def docs_pipelines(ver_mode, trigger):
environment = {'EDITION': 'oss'}
steps = [
download_grabpl_step(),
identify_runner_step(),
@ -43,9 +44,15 @@ def docs_pipelines(edition, ver_mode, trigger):
]
return pipeline(
name='{}-docs'.format(ver_mode), edition=edition, trigger=trigger, services=[], steps=steps, environment=environment,
name='{}-docs'.format(ver_mode),
edition='oss',
trigger=trigger,
services=[],
steps=steps,
environment=environment,
)
def lint_docs():
return {
'name': 'lint-docs',
@ -71,6 +78,7 @@ def trigger_docs_main():
'paths': docs_paths,
}
def trigger_docs_pr():
return {
'event': [

@ -1,13 +1,13 @@
load(
'scripts/drone/steps/lib.star',
'identify_runner_step',
'compile_build_cmd',
'download_grabpl_step',
'identify_runner_step',
'verify_gen_cue_step',
'verify_gen_jsonnet_step',
'wire_install_step',
'postgres_integration_tests_step',
'mysql_integration_tests_step',
'compile_build_cmd',
)
load(
@ -22,24 +22,33 @@ load(
'pipeline',
)
def integration_tests(trigger, ver_mode, edition):
environment = {'EDITION': edition}
services = integration_test_services(edition)
def integration_tests(trigger, prefix):
environment = {'EDITION': 'oss'}
services = integration_test_services(edition="oss")
volumes = integration_test_services_volumes()
init_steps = [
download_grabpl_step(),
compile_build_cmd(),
identify_runner_step(),
verify_gen_cue_step(edition="oss"),
verify_gen_jsonnet_step(edition="oss"),
verify_gen_cue_step(),
verify_gen_jsonnet_step(),
wire_install_step(),
]
test_steps = [
postgres_integration_tests_step(edition=edition, ver_mode=ver_mode),
mysql_integration_tests_step(edition=edition, ver_mode=ver_mode),
postgres_integration_tests_step(),
mysql_integration_tests_step(),
]
return pipeline(
name='{}-integration-tests'.format(ver_mode), edition="oss", trigger=trigger, services=services, steps=init_steps + test_steps,
environment=environment, volumes=volumes
name='{}-integration-tests'.format(prefix),
edition='oss',
trigger=trigger,
environment=environment,
services=services,
volumes=volumes,
steps=init_steps + test_steps,
)

@ -12,21 +12,31 @@ load(
'pipeline',
)
def lint_backend_pipeline(trigger, ver_mode):
environment = {'EDITION': 'oss'}
wire_step = wire_install_step()
wire_step.update({ 'depends_on': [] })
wire_step.update({'depends_on': []})
init_steps = [
identify_runner_step(),
compile_build_cmd(),
wire_step,
]
test_steps = [
lint_backend_step(edition="oss"),
lint_backend_step(),
]
if ver_mode == 'main':
test_steps.extend([lint_drone_step()])
test_steps.append(lint_drone_step())
return pipeline(
name='{}-lint-backend'.format(ver_mode), edition="oss", trigger=trigger, services=[], steps=init_steps + test_steps, environment=environment,
name='{}-lint-backend'.format(ver_mode),
edition="oss",
trigger=trigger,
services=[],
steps=init_steps + test_steps,
environment=environment,
)

@ -10,17 +10,24 @@ load(
'pipeline',
)
def lint_frontend_pipeline(trigger, ver_mode):
environment = {'EDITION': 'oss'}
yarn_step = yarn_install_step()
yarn_step.update({ 'depends_on': [] })
init_steps = [
identify_runner_step(),
yarn_step,
yarn_install_step(),
]
test_steps = [
lint_frontend_step(),
]
return pipeline(
name='{}-lint-frontend'.format(ver_mode), edition="oss", trigger=trigger, services=[], steps=init_steps + test_steps, environment=environment,
name='{}-lint-frontend'.format(ver_mode),
edition="oss",
trigger=trigger,
services=[],
steps=init_steps + test_steps,
environment=environment,
)

@ -14,9 +14,6 @@ load(
def publish_image_steps(edition, mode, docker_repo):
additional_docker_repo = ""
if edition == 'oss':
additional_docker_repo='grafana/grafana-oss'
steps = [
identify_runner_step(),
download_grabpl_step(),
@ -24,29 +21,55 @@ def publish_image_steps(edition, mode, docker_repo):
fetch_images_step(edition),
publish_images_step(edition, 'release', mode, docker_repo),
]
if additional_docker_repo != "":
steps.extend([publish_images_step(edition, 'release', mode, additional_docker_repo)])
if edition == 'oss':
steps.append(
publish_images_step(edition, 'release', mode, 'grafana/grafana-oss')
)
return steps
def publish_image_pipelines_public():
mode='public'
mode = 'public'
trigger = {
'event': ['promote'],
'target': [mode],
}
return [pipeline(
name='publish-docker-oss-{}'.format(mode), trigger=trigger, steps=publish_image_steps(edition='oss', mode=mode, docker_repo='grafana'), edition="", environment = {'EDITION': 'oss'}
), pipeline(
name='publish-docker-enterprise-{}'.format(mode), trigger=trigger, steps=publish_image_steps(edition='enterprise', mode=mode, docker_repo='grafana-enterprise'), edition="", environment = {'EDITION': 'enterprise'}
),]
return [
pipeline(
name='publish-docker-oss-{}'.format(mode),
trigger=trigger,
steps=publish_image_steps(edition='oss', mode=mode, docker_repo='grafana'),
edition="",
environment={'EDITION': 'oss'},
),
pipeline(
name='publish-docker-enterprise-{}'.format(mode),
trigger=trigger,
steps=publish_image_steps(
edition='enterprise', mode=mode, docker_repo='grafana-enterprise'
),
edition="",
environment={'EDITION': 'enterprise'},
),
]
def publish_image_pipelines_security():
mode='security'
mode = 'security'
trigger = {
'event': ['promote'],
'target': [mode],
}
return [pipeline(
name='publish-docker-enterprise-{}'.format(mode), trigger=trigger, steps=publish_image_steps(edition='enterprise', mode=mode, docker_repo='grafana-enterprise'), edition="", environment = {'EDITION': 'enterprise'}
),]
return [
pipeline(
name='publish-docker-enterprise-{}'.format(mode),
trigger=trigger,
steps=publish_image_steps(
edition='enterprise', mode=mode, docker_repo='grafana-enterprise'
),
edition="",
environment={'EDITION': 'enterprise'},
),
]

@ -1,8 +1,4 @@
load(
'scripts/drone/steps/lib.star',
'build_image',
'compile_build_cmd'
)
load('scripts/drone/steps/lib.star', 'build_image', 'compile_build_cmd')
load(
'scripts/drone/utils/utils.star',
@ -19,12 +15,11 @@ trigger = {
'docs/**',
'latest.json',
],
'include': [
'scripts/**/*.sh'
],
'include': ['scripts/**/*.sh'],
},
}
def shellcheck_step():
return {
'name': 'shellcheck',
@ -37,6 +32,7 @@ def shellcheck_step():
],
}
def shellcheck_pipeline():
environment = {'EDITION': 'oss'}
steps = [
@ -44,6 +40,10 @@ def shellcheck_pipeline():
shellcheck_step(),
]
return pipeline(
name='pr-shellcheck', edition="oss", trigger=trigger, services=[], steps=steps, environment=environment,
name='pr-shellcheck',
edition="oss",
trigger=trigger,
services=[],
steps=steps,
environment=environment,
)

@ -15,28 +15,71 @@ load(
load(
'scripts/drone/utils/utils.star',
'pipeline',
'with_deps',
)
def test_backend(trigger, ver_mode, edition="oss"):
environment = {'EDITION': edition}
init_steps = []
if edition != 'oss':
init_steps.extend([clone_enterprise_step(ver_mode), download_grabpl_step(), init_enterprise_step(ver_mode),])
init_steps.extend([
def test_backend(trigger, ver_mode, committish):
environment = {'EDITION': 'oss'}
steps = [
identify_runner_step(),
compile_build_cmd(edition),
verify_gen_cue_step(edition),
verify_gen_jsonnet_step(edition),
compile_build_cmd(edition='oss'),
verify_gen_cue_step(),
verify_gen_jsonnet_step(),
wire_install_step(),
])
test_steps = [
test_backend_step(edition),
test_backend_integration_step(edition),
test_backend_step(),
test_backend_integration_step(),
]
pipeline_name = '{}-test-backend'.format(ver_mode)
if ver_mode in ("release-branch", "release"):
pipeline_name = '{}-{}-test-backend'.format(ver_mode, 'oss')
return pipeline(
name=pipeline_name,
edition='oss',
trigger=trigger,
steps=steps,
environment=environment,
)
def test_backend_enterprise(trigger, ver_mode, committish, edition="enterprise"):
environment = {'EDITION': edition}
steps = (
[
clone_enterprise_step(committish),
download_grabpl_step(),
init_enterprise_step(ver_mode),
identify_runner_step(),
compile_build_cmd(edition),
]
+ with_deps(
[
verify_gen_cue_step(),
verify_gen_jsonnet_step(),
],
[
'init-enterprise',
],
)
+ [
wire_install_step(),
test_backend_step(),
test_backend_integration_step(),
]
)
pipeline_name = '{}-test-backend'.format(ver_mode)
if ver_mode in ("release-branch", "release"):
pipeline_name = '{}-{}-test-backend'.format(ver_mode, edition)
return pipeline(
name=pipeline_name, edition=edition, trigger=trigger, services=[], steps=init_steps + test_steps, environment=environment
name=pipeline_name,
edition=edition,
trigger=trigger,
steps=steps,
environment=environment,
)

@ -12,25 +12,59 @@ load(
load(
'scripts/drone/utils/utils.star',
'pipeline',
'with_deps',
)
def test_frontend(trigger, ver_mode, edition="oss"):
environment = {'EDITION': edition}
init_steps = []
if edition != 'oss':
init_steps.extend([clone_enterprise_step(ver_mode), init_enterprise_step(ver_mode),])
init_steps.extend([
def test_frontend(trigger, ver_mode, committish):
environment = {'EDITION': 'oss'}
steps = [
identify_runner_step(),
download_grabpl_step(),
yarn_install_step(edition),
])
test_steps = [
betterer_frontend_step(edition),
test_frontend_step(edition),
yarn_install_step(),
betterer_frontend_step(edition='oss'),
test_frontend_step(edition='oss'),
]
pipeline_name = '{}-test-frontend'.format(ver_mode)
if ver_mode in ("release-branch", "release"):
pipeline_name = '{}-{}-test-frontend'.format(ver_mode, 'oss')
return pipeline(
name=pipeline_name,
edition='oss',
trigger=trigger,
steps=steps,
environment=environment,
)
def test_frontend_enterprise(trigger, ver_mode, committish, edition='enterprise'):
environment = {'EDITION': edition}
steps = (
[
clone_enterprise_step(committish),
init_enterprise_step(ver_mode),
identify_runner_step(),
download_grabpl_step(),
]
+ with_deps([yarn_install_step()], ['init-enterprise'])
+ [
betterer_frontend_step(edition),
test_frontend_step(edition),
]
)
pipeline_name = '{}-test-frontend'.format(ver_mode)
if ver_mode in ("release-branch", "release"):
pipeline_name = '{}-{}-test-frontend'.format(ver_mode, edition)
return pipeline(
name=pipeline_name, edition=edition, trigger=trigger, services=[], steps=init_steps + test_steps, environment=environment,
name=pipeline_name,
edition=edition,
trigger=trigger,
steps=steps,
environment=environment,
)

@ -9,7 +9,9 @@ load(
)
trigger = {
'event': ['push',],
'event': [
'push',
],
'branch': 'main',
'paths': {
'exclude': [
@ -20,10 +22,22 @@ trigger = {
},
}
def enterprise_downstream_pipeline(edition, ver_mode):
environment = {'EDITION': edition}
steps = [enterprise_downstream_step(edition, ver_mode)]
deps = ['main-build-e2e-publish', 'main-integration-tests']
def enterprise_downstream_pipeline():
environment = {'EDITION': 'oss'}
steps = [
enterprise_downstream_step(ver_mode='main'),
]
deps = [
'main-build-e2e-publish',
'main-integration-tests',
]
return pipeline(
name='main-trigger-downstream', edition=edition, trigger=trigger, services=[], steps=steps, depends_on=deps, environment=environment,
)
name='main-trigger-downstream',
edition='oss',
trigger=trigger,
services=[],
steps=steps,
depends_on=deps,
environment=environment,
)

@ -11,6 +11,7 @@ load(
'pipeline',
)
def verify_drone(trigger, ver_mode):
environment = {'EDITION': 'oss'}
steps = [
@ -20,5 +21,10 @@ def verify_drone(trigger, ver_mode):
lint_drone_step(),
]
return pipeline(
name='{}-verify-drone'.format(ver_mode), edition="oss", trigger=trigger, services=[], steps=steps, environment=environment,
name='{}-verify-drone'.format(ver_mode),
edition="oss",
trigger=trigger,
services=[],
steps=steps,
environment=environment,
)

@ -1,8 +1,6 @@
load(
'scripts/drone/steps/lib.star',
'grabpl_version',
'wix_image',
'identify_runner_step',
'get_windows_steps',
)
load(
@ -10,126 +8,21 @@ load(
'pipeline',
)
load('scripts/drone/vault.star', 'from_secret', 'prerelease_bucket', 'github_token')
def windows(trigger, edition, ver_mode):
environment = {'EDITION': edition}
init_cmds = []
sfx = ''
if edition in ('enterprise', 'enterprise2'):
sfx = '-{}'.format(edition)
else:
init_cmds.extend([
'$$ProgressPreference = "SilentlyContinue"',
'Invoke-WebRequest https://grafana-downloads.storage.googleapis.com/grafana-build-pipeline/{}/windows/grabpl.exe -OutFile grabpl.exe'.format(
grabpl_version),
])
steps = [
{
'name': 'windows-init',
'image': wix_image,
'commands': init_cmds,
},
]
if (ver_mode == 'main' and (edition not in ('enterprise', 'enterprise2'))) or ver_mode in (
'release', 'release-branch',
):
bucket = '%PRERELEASE_BUCKET%/artifacts/downloads'
if ver_mode == 'release':
ver_part = '${DRONE_TAG}'
dir = 'release'
else:
dir = 'main'
bucket = 'grafana-downloads'
build_no = 'DRONE_BUILD_NUMBER'
ver_part = '--build-id $$env:{}'.format(build_no)
installer_commands = [
'$$gcpKey = $$env:GCP_KEY',
'[System.Text.Encoding]::UTF8.GetString([System.Convert]::FromBase64String($$gcpKey)) > gcpkey.json',
# gcloud fails to read the file unless converted with dos2unix
'dos2unix gcpkey.json',
'gcloud auth activate-service-account --key-file=gcpkey.json',
'rm gcpkey.json',
'cp C:\\App\\nssm-2.24.zip .',
]
if (ver_mode == 'main' and (edition not in ('enterprise', 'enterprise2'))) or ver_mode in (
'release',
):
installer_commands.extend([
'.\\grabpl.exe windows-installer --edition {} {}'.format(edition, ver_part),
'$$fname = ((Get-Childitem grafana*.msi -name) -split "`n")[0]',
])
if ver_mode == 'main':
installer_commands.extend([
'gsutil cp $$fname gs://{}/{}/{}/'.format(bucket, edition, dir),
'gsutil cp "$$fname.sha256" gs://{}/{}/{}/'.format(bucket, edition, dir),
])
else:
installer_commands.extend([
'gsutil cp $$fname gs://{}/{}/{}/{}/'.format(bucket, ver_part, edition, dir),
'gsutil cp "$$fname.sha256" gs://{}/{}/{}/{}/'.format(bucket, ver_part, edition, dir),
])
steps.append({
'name': 'build-windows-installer',
'image': wix_image,
'depends_on': [
'windows-init',
],
'environment': {
'GCP_KEY': from_secret('gcp_key'),
'PRERELEASE_BUCKET': from_secret(prerelease_bucket),
'GITHUB_TOKEN': from_secret('github_token')
},
'commands': installer_commands,
})
if edition in ('enterprise', 'enterprise2'):
if ver_mode == 'release':
committish = '${DRONE_TAG}'
elif ver_mode == 'release-branch':
committish = '$$env:DRONE_BRANCH'
else:
committish = '$$env:DRONE_COMMIT'
# For enterprise, we have to clone both OSS and enterprise and merge the latter into the former
download_grabpl_step_cmds = [
'$$ProgressPreference = "SilentlyContinue"',
'Invoke-WebRequest https://grafana-downloads.storage.googleapis.com/grafana-build-pipeline/{}/windows/grabpl.exe -OutFile grabpl.exe'.format(
grabpl_version),
]
clone_cmds = [
'git clone "https://$$env:GITHUB_TOKEN@github.com/grafana/grafana-enterprise.git"',
]
clone_cmds.extend([
'cd grafana-enterprise',
'git checkout {}'.format(committish),
])
steps.insert(0, {
'name': 'clone',
'image': wix_image,
'environment': {
'GITHUB_TOKEN': from_secret(github_token),
},
'commands': download_grabpl_step_cmds + clone_cmds,
})
steps[1]['depends_on'] = [
'clone',
]
steps[1]['commands'].extend([
# Need to move grafana-enterprise out of the way, so directory is empty and can be cloned into
'cp -r grafana-enterprise C:\\App\\grafana-enterprise',
'rm -r -force grafana-enterprise',
'cp grabpl.exe C:\\App\\grabpl.exe',
'rm -force grabpl.exe',
'C:\\App\\grabpl.exe init-enterprise --github-token $$env:GITHUB_TOKEN C:\\App\\grafana-enterprise',
'cp C:\\App\\grabpl.exe grabpl.exe',
])
if 'environment' in steps[1]:
steps[1]['environment'] + {'GITHUB_TOKEN': from_secret(github_token)}
else:
steps[1]['environment'] = {'GITHUB_TOKEN': from_secret(github_token)}
return pipeline(
name='main-windows', edition=edition, trigger=dict(trigger, repo=['grafana/grafana']),
steps=[identify_runner_step('windows')] + steps,
depends_on=['main-test-frontend', 'main-test-backend', 'main-build-e2e-publish', 'main-integration-tests'], platform='windows', environment=environment,
name='main-windows',
edition=edition,
trigger=dict(trigger, repo=['grafana/grafana']),
steps=get_windows_steps(edition, ver_mode),
depends_on=[
'main-test-frontend',
'main-test-backend',
'main-build-e2e-publish',
'main-integration-tests',
],
platform='windows',
environment=environment,
)

@ -1,8 +1,9 @@
def integration_test_services_volumes():
return [
{ 'name': 'postgres', 'temp': { 'medium': 'memory' } },
{ 'name': 'mysql', 'temp': { 'medium': 'memory' }
}]
{'name': 'postgres', 'temp': {'medium': 'memory'}},
{'name': 'mysql', 'temp': {'medium': 'memory'}},
]
def integration_test_services(edition):
services = [
@ -10,15 +11,14 @@ def integration_test_services(edition):
'name': 'postgres',
'image': 'postgres:12.3-alpine',
'environment': {
'POSTGRES_USER': 'grafanatest',
'POSTGRES_PASSWORD': 'grafanatest',
'POSTGRES_DB': 'grafanatest',
'PGDATA': '/var/lib/postgresql/data/pgdata',
'POSTGRES_USER': 'grafanatest',
'POSTGRES_PASSWORD': 'grafanatest',
'POSTGRES_DB': 'grafanatest',
'PGDATA': '/var/lib/postgresql/data/pgdata',
},
'volumes': [{
'name': 'postgres',
'path': '/var/lib/postgresql/data/pgdata'
}],
'volumes': [
{'name': 'postgres', 'path': '/var/lib/postgresql/data/pgdata'}
],
},
{
'name': 'mysql',
@ -29,33 +29,36 @@ def integration_test_services(edition):
'MYSQL_USER': 'grafana',
'MYSQL_PASSWORD': 'password',
},
'volumes': [{
'name': 'mysql',
'path': '/var/lib/mysql'
}]
'volumes': [{'name': 'mysql', 'path': '/var/lib/mysql'}],
},
]
if edition in ('enterprise', 'enterprise2'):
services.extend([{
'name': 'redis',
'image': 'redis:6.2.1-alpine',
'environment': {},
}, {
'name': 'memcached',
'image': 'memcached:1.6.9-alpine',
'environment': {},
}])
services.extend(
[
{
'name': 'redis',
'image': 'redis:6.2.1-alpine',
'environment': {},
},
{
'name': 'memcached',
'image': 'memcached:1.6.9-alpine',
'environment': {},
},
]
)
return services
def ldap_service():
return {
'name': 'ldap',
'image': 'osixia/openldap:1.4.0',
'environment': {
'LDAP_ADMIN_PASSWORD': 'grafana',
'LDAP_DOMAIN': 'grafana.org',
'SLAPD_ADDITIONAL_MODULES': 'memberof',
'LDAP_ADMIN_PASSWORD': 'grafana',
'LDAP_DOMAIN': 'grafana.org',
'SLAPD_ADDITIONAL_MODULES': 'memberof',
},
}

File diff suppressed because it is too large Load Diff

@ -4,25 +4,33 @@ load(
'slack_step',
)
load('scripts/drone/vault.star', 'from_secret', 'github_token', 'pull_secret', 'drone_token')
load(
'scripts/drone/vault.star',
'from_secret',
'pull_secret',
)
failure_template = 'Build {{build.number}} failed for commit: <https://github.com/{{repo.owner}}/{{repo.name}}/commit/{{build.commit}}|{{ truncate build.commit 8 }}>: {{build.link}}\nBranch: <https://github.com/{{ repo.owner }}/{{ repo.name }}/commits/{{ build.branch }}|{{ build.branch }}>\nAuthor: {{build.author}}'
drone_change_template = '`.drone.yml` and `starlark` files have been changed on the OSS repo, by: {{build.author}}. \nBranch: <https://github.com/{{ repo.owner }}/{{ repo.name }}/commits/{{ build.branch }}|{{ build.branch }}>\nCommit hash: <https://github.com/{{repo.owner}}/{{repo.name}}/commit/{{build.commit}}|{{ truncate build.commit 8 }}>'
def pipeline(
name, edition, trigger, steps, services=[], platform='linux', depends_on=[], environment=None, volumes=[],
):
name,
edition,
trigger,
steps,
services=[],
platform='linux',
depends_on=[],
environment=None,
volumes=[],
):
if platform != 'windows':
platform_conf = {
'platform': {
'os': 'linux',
'arch': 'amd64'
},
'platform': {'os': 'linux', 'arch': 'amd64'},
# A shared cache is used on the host
# To avoid issues with parallel builds, we run this repo on single build agents
'node': {
'type': 'no-parallel'
}
'node': {'type': 'no-parallel'},
}
else:
platform_conf = {
@ -43,19 +51,23 @@ def pipeline(
'clone': {
'retries': 3,
},
'volumes': [{
'name': 'docker',
'host': {
'path': '/var/run/docker.sock',
},
}],
'volumes': [
{
'name': 'docker',
'host': {
'path': '/var/run/docker.sock',
},
}
],
'depends_on': depends_on,
'image_pull_secrets': [pull_secret],
}
if environment:
pipeline.update({
'environment': environment,
})
pipeline.update(
{
'environment': environment,
}
)
pipeline['volumes'].extend(volumes)
pipeline.update(platform_conf)
@ -68,7 +80,10 @@ def pipeline(
return pipeline
def notify_pipeline(name, slack_channel, trigger, depends_on=[], template=None, secret=None):
def notify_pipeline(
name, slack_channel, trigger, depends_on=[], template=None, secret=None
):
trigger = dict(trigger)
return {
'kind': 'pipeline',
@ -89,3 +104,9 @@ def notify_pipeline(name, slack_channel, trigger, depends_on=[], template=None,
}
# TODO: this overrides any existing dependencies because we're following the existing logic
# it should append to any existing dependencies
def with_deps(steps, deps=[]):
for step in steps:
step['depends_on'] = deps
return steps

@ -1,5 +1,4 @@
pull_secret = 'dockerconfigjson'
github_token = 'github_token'
drone_token = 'drone_token'
prerelease_bucket = 'prerelease_bucket'
gcp_upload_artifacts_key = 'gcp_upload_artifacts_key'
@ -7,10 +6,10 @@ azure_sp_app_id = 'azure_sp_app_id'
azure_sp_app_pw = 'azure_sp_app_pw'
azure_tenant = 'azure_tenant'
def from_secret(secret):
return {
'from_secret': secret
}
return {'from_secret': secret}
def vault_secret(name, path, key):
return {
@ -19,25 +18,65 @@ def vault_secret(name, path, key):
'get': {
'path': path,
'name': key,
}
},
}
def secrets():
    """Return all Vault-backed secret resources required by the pipelines.

    The source span contained diff residue that listed several secrets twice
    (old compact call plus new expanded call); each secret is declared
    exactly once here.
    """
    return [
        vault_secret(pull_secret, 'secret/data/common/gcr', '.dockerconfigjson'),
        vault_secret('github_token', 'infra/data/ci/github/grafanabot', 'pat'),
        vault_secret(drone_token, 'infra/data/ci/drone', 'machine-user-token'),
        vault_secret(prerelease_bucket, 'infra/data/ci/grafana/prerelease', 'bucket'),
        vault_secret(
            gcp_upload_artifacts_key,
            'infra/data/ci/grafana/releng/artifacts-uploader-service-account',
            'credentials.json',
        ),
        vault_secret(
            azure_sp_app_id,
            'infra/data/ci/datasources/cpp-azure-resourcemanager-credentials',
            'application_id',
        ),
        vault_secret(
            azure_sp_app_pw,
            'infra/data/ci/datasources/cpp-azure-resourcemanager-credentials',
            'application_secret',
        ),
        vault_secret(
            azure_tenant,
            'infra/data/ci/datasources/cpp-azure-resourcemanager-credentials',
            'tenant_id',
        ),
        # Package publishing
        vault_secret(
            'packages_gpg_public_key',
            'infra/data/ci/packages-publish/gpg',
            'public-key',
        ),
        vault_secret(
            'packages_gpg_private_key',
            'infra/data/ci/packages-publish/gpg',
            'private-key',
        ),
        vault_secret(
            'packages_gpg_passphrase',
            'infra/data/ci/packages-publish/gpg',
            'passphrase',
        ),
        vault_secret(
            'packages_service_account',
            'infra/data/ci/packages-publish/service-account',
            'credentials.json',
        ),
        vault_secret(
            'packages_access_key_id',
            'infra/data/ci/packages-publish/bucket-credentials',
            'AccessID',
        ),
        vault_secret(
            'packages_secret_access_key',
            'infra/data/ci/packages-publish/bucket-credentials',
            'Secret',
        ),
    ]

@ -6,7 +6,12 @@ load(
)
# Run release-branch pipelines for version branches (e.g. refs/heads/v9.3.x).
ver_mode = 'release-branch'
trigger = {'ref': ['refs/heads/v[0-9]*']}
def version_branch_pipelines():
    """Build the OSS, enterprise and enterprise2 pipelines for version branches.

    All three pipeline sets share the module-level ver_mode and trigger.
    """
    # The source span stacked an old one-line return above the new expression
    # (diff residue), leaving the second return unreachable; keep one return.
    return (
        oss_pipelines(ver_mode=ver_mode, trigger=trigger)
        + enterprise_pipelines(ver_mode=ver_mode, trigger=trigger)
        + enterprise2_pipelines(ver_mode=ver_mode, trigger=trigger)
    )

Loading…
Cancel
Save