[v9.4.x] CI: Update CI/CD tooling and pipelines from main (#76881)

* CI: Update CI/CD tooling and pipelines from main (#76814)

* CI: Update CI/CD tooling and pipelines from main

* Update Makefile

* Comment out validate_openapi_spec_step

* Update broken frontend tests

* Fix validate-npm-packages regex to work without suffix

* Fix cypress image version

(cherry picked from commit 03ecb1db39)

* Fix path for ./pkg/kindsys/report.go on Makefile

* Re-add ./pkg/cmd/grafana-cli/runner to make gen-go
pull/77015/head
Guilherme Caulada 2 years ago committed by GitHub
parent 6365037e69
commit 2b54a169b2
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
  1. 20
      .drone.star
  2. 2380
      .drone.yml
  3. 126
      Makefile
  4. 24
      pkg/build/cmd.go
  5. 3
      pkg/build/cmd/buildbackend.go
  6. 3
      pkg/build/cmd/builddocker.go
  7. 3
      pkg/build/cmd/buildfrontend.go
  8. 3
      pkg/build/cmd/buildinternalplugins.go
  9. 3
      pkg/build/cmd/e2etests.go
  10. 3
      pkg/build/cmd/enterprisecheck.go
  11. 3
      pkg/build/cmd/exportversion.go
  12. 3
      pkg/build/cmd/fetchimages.go
  13. 3
      pkg/build/cmd/grafanacom.go
  14. 3
      pkg/build/cmd/main.go
  15. 3
      pkg/build/cmd/npm.go
  16. 3
      pkg/build/cmd/package.go
  17. 3
      pkg/build/cmd/publishaws.go
  18. 3
      pkg/build/cmd/publishgithub.go
  19. 3
      pkg/build/cmd/publishimages_enterprise2.go
  20. 3
      pkg/build/cmd/storestorybook.go
  21. 3
      pkg/build/cmd/uploadcdn.go
  22. 3
      pkg/build/cmd/uploadpackages.go
  23. 3
      pkg/build/cmd/uploadpackages_test.go
  24. 8
      pkg/build/cmd/verifydrone.go
  25. 3
      pkg/build/config/genmetadata.go
  26. 6
      pkg/build/config/genmetadata_test.go
  27. 14
      pkg/build/config/revision.go
  28. 2
      pkg/build/config/version.go
  29. 37
      pkg/build/config/versions.go
  30. 2
      pkg/build/docker/build.go
  31. 3
      pkg/build/droneutil/event_test.go
  32. 25
      pkg/build/env/fallback_test.go
  33. 4
      pkg/build/env/lookup_test.go
  34. 27
      pkg/build/frontend/config_test.go
  35. 3
      pkg/build/fsutil/copy_test.go
  36. 3
      pkg/build/fsutil/exists_test.go
  37. 25
      pkg/build/gcloud/storage/gsutil.go
  38. 13
      pkg/build/git.go
  39. 3
      pkg/build/git/git.go
  40. 3
      pkg/build/git/git_checks_test.go
  41. 3
      pkg/build/git/git_issues_test.go
  42. 3
      pkg/build/git/git_test.go
  43. 10
      pkg/build/grafana/build.go
  44. 2
      pkg/build/lerna/lerna.go
  45. 3
      pkg/build/metrics/publish.go
  46. 6
      pkg/build/npm/npm.go
  47. 10
      pkg/build/packaging/grafana.go
  48. 3
      pkg/build/packaging/grafana_test.go
  49. 3
      pkg/build/version.go
  50. 628
      scripts/drone/TAGS
  51. 36
      scripts/drone/events/cron.star
  52. 61
      scripts/drone/events/main.star
  53. 46
      scripts/drone/events/pr.star
  54. 239
      scripts/drone/events/release.star
  55. 80
      scripts/drone/pipelines/benchmarks.star
  56. 61
      scripts/drone/pipelines/build.star
  57. 24
      scripts/drone/pipelines/ci_images.star
  58. 10
      scripts/drone/pipelines/docs.star
  59. 39
      scripts/drone/pipelines/integration_tests.star
  60. 4
      scripts/drone/pipelines/lint_backend.star
  61. 8
      scripts/drone/pipelines/lint_frontend.star
  62. 16
      scripts/drone/pipelines/shellcheck.star
  63. 17
      scripts/drone/pipelines/test_backend.star
  64. 10
      scripts/drone/pipelines/test_frontend.star
  65. 10
      scripts/drone/pipelines/whats_new_checker.star
  66. 22
      scripts/drone/pipelines/windows.star
  67. 286
      scripts/drone/rgm.star
  68. 46
      scripts/drone/services/services.star
  69. 774
      scripts/drone/steps/lib.star
  70. 187
      scripts/drone/steps/lib_windows.star
  71. 61
      scripts/drone/steps/rgm.star
  72. 49
      scripts/drone/utils/images.star
  73. 3
      scripts/drone/utils/utils.star
  74. 13
      scripts/drone/utils/windows_images.star
  75. 9
      scripts/drone/variables.star
  76. 33
      scripts/drone/vault.star
  77. 16
      scripts/drone/version.star
  78. 80
      scripts/validate-npm-packages.sh

@ -7,8 +7,9 @@
This module returns a Drone configuration including pipelines and secrets. This module returns a Drone configuration including pipelines and secrets.
""" """
load("scripts/drone/events/pr.star", "pr_pipelines") load("scripts/drone/events/cron.star", "cronjobs")
load("scripts/drone/events/main.star", "main_pipelines") load("scripts/drone/events/main.star", "main_pipelines")
load("scripts/drone/events/pr.star", "pr_pipelines")
load( load(
"scripts/drone/events/release.star", "scripts/drone/events/release.star",
"integration_test_pipelines", "integration_test_pipelines",
@ -17,24 +18,22 @@ load(
"publish_packages_pipeline", "publish_packages_pipeline",
) )
load( load(
"scripts/drone/rgm.star", "scripts/drone/pipelines/ci_images.star",
"rgm", "publish_ci_build_container_image_pipeline",
"publish_ci_windows_test_image_pipeline",
) )
load( load(
"scripts/drone/pipelines/publish_images.star", "scripts/drone/pipelines/publish_images.star",
"publish_image_pipelines_public", "publish_image_pipelines_public",
) )
load(
"scripts/drone/pipelines/ci_images.star",
"publish_ci_build_container_image_pipeline",
"publish_ci_windows_test_image_pipeline",
)
load( load(
"scripts/drone/pipelines/windows.star", "scripts/drone/pipelines/windows.star",
"windows_test_backend", "windows_test_backend",
) )
load("scripts/drone/version.star", "version_branch_pipelines") load(
load("scripts/drone/events/cron.star", "cronjobs") "scripts/drone/rgm.star",
"rgm",
)
load("scripts/drone/vault.star", "secrets") load("scripts/drone/vault.star", "secrets")
def main(_ctx): def main(_ctx):
@ -50,7 +49,6 @@ def main(_ctx):
"event": ["promote"], "event": ["promote"],
"target": ["test-windows"], "target": ["test-windows"],
}, "oss", "testing")] + }, "oss", "testing")] +
version_branch_pipelines() +
integration_test_pipelines() + integration_test_pipelines() +
publish_ci_windows_test_image_pipeline() + publish_ci_windows_test_image_pipeline() +
publish_ci_build_container_image_pipeline() + publish_ci_build_container_image_pipeline() +

File diff suppressed because it is too large Load Diff

@ -13,9 +13,12 @@ GO = go
GO_FILES ?= ./pkg/... GO_FILES ?= ./pkg/...
SH_FILES ?= $(shell find ./scripts -name *.sh) SH_FILES ?= $(shell find ./scripts -name *.sh)
GO_BUILD_FLAGS += $(if $(GO_BUILD_DEV),-dev) GO_BUILD_FLAGS += $(if $(GO_BUILD_DEV),-dev)
GO_BUILD_FLAGS += $(if $(GO_BUILD_DEV),-dev)
GO_BUILD_FLAGS += $(if $(GO_BUILD_TAGS),-build-tags=$(GO_BUILD_TAGS)) GO_BUILD_FLAGS += $(if $(GO_BUILD_TAGS),-build-tags=$(GO_BUILD_TAGS))
targets := $(shell echo '$(sources)' | tr "," " ")
GO_INTEGRATION_TESTS := $(shell find ./pkg -type f -name '*_test.go' -exec grep -l '^func TestIntegration' '{}' '+' | grep -o '\(.*\)/' | sort -u)
all: deps build all: deps build
##@ Dependencies ##@ Dependencies
@ -33,35 +36,68 @@ node_modules: package.json yarn.lock ## Install node modules.
##@ Swagger ##@ Swagger
SPEC_TARGET = public/api-spec.json SPEC_TARGET = public/api-spec.json
MERGED_SPEC_TARGET := public/api-merged.json ENTERPRISE_SPEC_TARGET = public/api-enterprise-spec.json
MERGED_SPEC_TARGET = public/api-merged.json
NGALERT_SPEC_TARGET = pkg/services/ngalert/api/tooling/api.json NGALERT_SPEC_TARGET = pkg/services/ngalert/api/tooling/api.json
$(NGALERT_SPEC_TARGET): $(NGALERT_SPEC_TARGET):
+$(MAKE) -C pkg/services/ngalert/api/tooling api.json +$(MAKE) -C pkg/services/ngalert/api/tooling api.json
$(MERGED_SPEC_TARGET): $(SPEC_TARGET) $(NGALERT_SPEC_TARGET) $(SWAGGER) ## Merge generated and ngalert API specs $(MERGED_SPEC_TARGET): swagger-oss-gen swagger-enterprise-gen $(NGALERT_SPEC_TARGET) $(SWAGGER) ## Merge generated and ngalert API specs
# known conflicts DsPermissionType, AddApiKeyCommand, Json, Duration (identical models referenced by both specs) # known conflicts DsPermissionType, AddApiKeyCommand, Json, Duration (identical models referenced by both specs)
$(SWAGGER) mixin $(SPEC_TARGET) $(NGALERT_SPEC_TARGET) --ignore-conflicts -o $(MERGED_SPEC_TARGET) $(SWAGGER) mixin $(SPEC_TARGET) $(ENTERPRISE_SPEC_TARGET) $(NGALERT_SPEC_TARGET) --ignore-conflicts -o $(MERGED_SPEC_TARGET)
$(SPEC_TARGET): $(SWAGGER) ## Generate API Swagger specification swagger-oss-gen: $(SWAGGER) ## Generate API Swagger specification
@echo "re-generating swagger for OSS"
rm -f $(SPEC_TARGET)
SWAGGER_GENERATE_EXTENSION=false $(SWAGGER) generate spec -m -w pkg/server -o $(SPEC_TARGET) \ SWAGGER_GENERATE_EXTENSION=false $(SWAGGER) generate spec -m -w pkg/server -o $(SPEC_TARGET) \
-x "github.com/grafana/grafana/pkg/services/ngalert/api/tooling/definitions" \ -x "github.com/grafana/grafana/pkg/services/ngalert/api/tooling/definitions" \
-x "github.com/prometheus/alertmanager" \ -x "github.com/prometheus/alertmanager" \
-i pkg/api/swagger_tags.json \ -i pkg/api/swagger_tags.json \
--exclude-tag=alpha --exclude-tag=alpha \
--exclude-tag=enterprise
# this file only exists if enterprise is enabled
ENTERPRISE_EXT_FILE = pkg/extensions/ext.go
ifeq ("$(wildcard $(ENTERPRISE_EXT_FILE))","") ## if enterprise is not enabled
swagger-enterprise-gen:
@echo "skipping re-generating swagger for enterprise: not enabled"
else
swagger-enterprise-gen: $(SWAGGER) ## Generate API Swagger specification
@echo "re-generating swagger for enterprise"
rm -f $(ENTERPRISE_SPEC_TARGET)
SWAGGER_GENERATE_EXTENSION=false $(SWAGGER) generate spec -m -w pkg/server -o $(ENTERPRISE_SPEC_TARGET) \
-x "github.com/grafana/grafana/pkg/services/ngalert/api/tooling/definitions" \
-x "github.com/prometheus/alertmanager" \
-i pkg/api/swagger_tags.json \
--exclude-tag=alpha \
--include-tag=enterprise
endif
swagger-api-spec: gen-go $(SPEC_TARGET) $(MERGED_SPEC_TARGET) validate-api-spec swagger-gen: gen-go $(MERGED_SPEC_TARGET) swagger-validate
validate-api-spec: $(MERGED_SPEC_TARGET) $(SWAGGER) ## Validate API spec swagger-validate: $(MERGED_SPEC_TARGET) $(SWAGGER) ## Validate API spec
$(SWAGGER) validate $(<) $(SWAGGER) validate $(<)
clean-api-spec: swagger-clean:
rm $(SPEC_TARGET) $(MERGED_SPEC_TARGET) $(OAPI_SPEC_TARGET) rm -f $(SPEC_TARGET) $(MERGED_SPEC_TARGET) $(OAPI_SPEC_TARGET)
.PHONY: cleanup-old-git-hooks
cleanup-old-git-hooks:
./scripts/cleanup-husky.sh
.PHONY: lefthook-install
lefthook-install: cleanup-old-git-hooks $(LEFTHOOK) # install lefthook for pre-commit hooks
$(LEFTHOOK) install -f
.PHONY: lefthook-uninstall
lefthook-uninstall: $(LEFTHOOK)
$(LEFTHOOK) uninstall
##@ OpenAPI 3 ##@ OpenAPI 3
OAPI_SPEC_TARGET = public/openapi3.json OAPI_SPEC_TARGET = public/openapi3.json
openapi3-gen: swagger-api-spec ## Generates OpenApi 3 specs from the Swagger 2 already generated openapi3-gen: swagger-gen ## Generates OpenApi 3 specs from the Swagger 2 already generated
$(GO) run scripts/openapi3/openapi3conv.go $(MERGED_SPEC_TARGET) $(OAPI_SPEC_TARGET) $(GO) run scripts/openapi3/openapi3conv.go $(MERGED_SPEC_TARGET) $(OAPI_SPEC_TARGET)
##@ Building ##@ Building
@ -72,7 +108,7 @@ gen-cue: ## Do all CUE/Thema code generation
go generate ./public/app/plugins/gen.go go generate ./public/app/plugins/gen.go
go generate ./pkg/kindsys/report.go go generate ./pkg/kindsys/report.go
gen-go: $(WIRE) gen-cue gen-go: $(WIRE)
@echo "generate go files" @echo "generate go files"
$(WIRE) gen -tags $(WIRE_TAGS) ./pkg/server ./pkg/cmd/grafana-cli/runner $(WIRE) gen -tags $(WIRE_TAGS) ./pkg/server ./pkg/cmd/grafana-cli/runner
@ -84,7 +120,7 @@ fix-cue: $(CUE)
gen-jsonnet: gen-jsonnet:
go generate ./devenv/jsonnet go generate ./devenv/jsonnet
build-go: $(MERGED_SPEC_TARGET) gen-go ## Build all Go binaries. build-go: gen-go ## Build all Go binaries.
@echo "build go files" @echo "build go files"
$(GO) run build.go $(GO_BUILD_FLAGS) build $(GO) run build.go $(GO_BUILD_FLAGS) build
@ -126,19 +162,39 @@ test-go-unit: ## Run unit tests for backend with flags.
.PHONY: test-go-integration .PHONY: test-go-integration
test-go-integration: ## Run integration tests for backend with flags. test-go-integration: ## Run integration tests for backend with flags.
@echo "test backend integration tests" @echo "test backend integration tests"
$(GO) test -run Integration -covermode=atomic -timeout=30m ./pkg/... $(GO) test -count=1 -run "^TestIntegration" -covermode=atomic -timeout=5m $(GO_INTEGRATION_TESTS)
.PHONY: test-go-integration-alertmanager
test-go-integration-alertmanager: ## Run integration tests for the remote alertmanager (config taken from the mimir_backend block).
@echo "test remote alertmanager integration tests"
$(GO) clean -testcache
AM_URL=http://localhost:8080 AM_TENANT_ID=test AM_PASSWORD=test \
$(GO) test -count=1 -run "^TestIntegrationRemoteAlertmanager" -covermode=atomic -timeout=5m ./pkg/services/ngalert/notifier/...
.PHONY: test-go-integration-postgres .PHONY: test-go-integration-postgres
test-go-integration-postgres: devenv-postgres ## Run integration tests for postgres backend with flags. test-go-integration-postgres: devenv-postgres ## Run integration tests for postgres backend with flags.
@echo "test backend integration postgres tests" @echo "test backend integration postgres tests"
$(GO) clean -testcache $(GO) clean -testcache
$(GO) list './pkg/...' | xargs -I {} sh -c 'GRAFANA_TEST_DB=postgres go test -run Integration -covermode=atomic -timeout=2m {}' GRAFANA_TEST_DB=postgres \
$(GO) test -p=1 -count=1 -run "^TestIntegration" -covermode=atomic -timeout=10m $(GO_INTEGRATION_TESTS)
.PHONY: test-go-integration-mysql .PHONY: test-go-integration-mysql
test-go-integration-mysql: devenv-mysql ## Run integration tests for mysql backend with flags. test-go-integration-mysql: devenv-mysql ## Run integration tests for mysql backend with flags.
@echo "test backend integration mysql tests" @echo "test backend integration mysql tests"
GRAFANA_TEST_DB=mysql \
$(GO) test -p=1 -count=1 -run "^TestIntegration" -covermode=atomic -timeout=10m $(GO_INTEGRATION_TESTS)
.PHONY: test-go-integration-redis
test-go-integration-redis: ## Run integration tests for redis cache.
@echo "test backend integration redis tests"
$(GO) clean -testcache $(GO) clean -testcache
$(GO) list './pkg/...' | xargs -I {} sh -c 'GRAFANA_TEST_DB=mysql go test -run Integration -covermode=atomic -timeout=2m {}' REDIS_URL=localhost:6379 $(GO) test -run IntegrationRedis -covermode=atomic -timeout=2m $(GO_INTEGRATION_TESTS)
.PHONY: test-go-integration-memcached
test-go-integration-memcached: ## Run integration tests for memcached cache.
@echo "test backend integration memcached tests"
$(GO) clean -testcache
MEMCACHED_HOSTS=localhost:11211 $(GO) test -run IntegrationMemcached -covermode=atomic -timeout=2m $(GO_INTEGRATION_TESTS)
test-js: ## Run tests for frontend. test-js: ## Run tests for frontend.
@echo "test frontend" @echo "test frontend"
@ -162,19 +218,36 @@ shellcheck: $(SH_FILES) ## Run checks for shell scripts.
##@ Docker ##@ Docker
TAG_SUFFIX=$(if $(WIRE_TAGS)!=oss,-$(WIRE_TAGS))
PLATFORM=linux/amd64
build-docker-full: ## Build Docker image for development. build-docker-full: ## Build Docker image for development.
@echo "build docker container" @echo "build docker container"
DOCKER_BUILDKIT=1 \ tar -ch . | \
docker build \ docker buildx build - \
--tag grafana/grafana:dev . --platform $(PLATFORM) \
--build-arg BINGO=false \
--build-arg GO_BUILD_TAGS=$(GO_BUILD_TAGS) \
--build-arg WIRE_TAGS=$(WIRE_TAGS) \
--build-arg COMMIT_SHA=$$(git rev-parse HEAD) \
--build-arg BUILD_BRANCH=$$(git rev-parse --abbrev-ref HEAD) \
--tag grafana/grafana$(TAG_SUFFIX):dev \
$(DOCKER_BUILD_ARGS)
build-docker-full-ubuntu: ## Build Docker image based on Ubuntu for development. build-docker-full-ubuntu: ## Build Docker image based on Ubuntu for development.
@echo "build docker container" @echo "build docker container"
DOCKER_BUILDKIT=1 \ tar -ch . | \
docker build \ docker buildx build - \
--build-arg BASE_IMAGE=ubuntu:20.04 \ --platform $(PLATFORM) \
--build-arg BINGO=false \
--build-arg GO_BUILD_TAGS=$(GO_BUILD_TAGS) \
--build-arg WIRE_TAGS=$(WIRE_TAGS) \
--build-arg COMMIT_SHA=$$(git rev-parse HEAD) \
--build-arg BUILD_BRANCH=$$(git rev-parse --abbrev-ref HEAD) \
--build-arg BASE_IMAGE=ubuntu:22.04 \
--build-arg GO_IMAGE=golang:1.20.10 \ --build-arg GO_IMAGE=golang:1.20.10 \
--tag grafana/grafana:dev-ubuntu . --tag grafana/grafana$(TAG_SUFFIX):dev-ubuntu \
$(DOCKER_BUILD_ARGS)
##@ Services ##@ Services
@ -185,8 +258,6 @@ devenv:
@printf 'You have to define sources for this command \nexample: make devenv sources=postgres,openldap\n' @printf 'You have to define sources for this command \nexample: make devenv sources=postgres,openldap\n'
else else
devenv: devenv-down ## Start optional services, e.g. postgres, prometheus, and elasticsearch. devenv: devenv-down ## Start optional services, e.g. postgres, prometheus, and elasticsearch.
$(eval targets := $(shell echo '$(sources)' | tr "," " "))
@cd devenv; \ @cd devenv; \
./create_docker_compose.sh $(targets) || \ ./create_docker_compose.sh $(targets) || \
(rm -rf {docker-compose.yaml,conf.tmp,.env}; exit 1) (rm -rf {docker-compose.yaml,conf.tmp,.env}; exit 1)
@ -219,6 +290,9 @@ devenv-mysql:
protobuf: ## Compile protobuf definitions protobuf: ## Compile protobuf definitions
bash scripts/protobuf-check.sh bash scripts/protobuf-check.sh
bash pkg/plugins/backendplugin/pluginextensionv2/generate.sh bash pkg/plugins/backendplugin/pluginextensionv2/generate.sh
bash pkg/plugins/backendplugin/secretsmanagerplugin/generate.sh
bash pkg/services/store/entity/generate.sh
bash pkg/infra/grn/generate.sh
clean: ## Clean up intermediate build artifacts. clean: ## Clean up intermediate build artifacts.
@echo "cleaning" @echo "cleaning"
@ -244,7 +318,7 @@ scripts/drone/TAGS: $(shell find scripts/drone -name '*.star')
etags --lang none --regex="/def \(\w+\)[^:]+:/\1/" --regex="/\s*\(\w+\) =/\1/" $^ -o $@ etags --lang none --regex="/def \(\w+\)[^:]+:/\1/" --regex="/\s*\(\w+\) =/\1/" $^ -o $@
format-drone: format-drone:
buildifier -r scripts/drone buildifier --lint=fix -r scripts/drone
help: ## Display this help. help: ## Display this help.
@awk 'BEGIN {FS = ":.*##"; printf "\nUsage:\n make \033[36m<target>\033[0m\n"} /^[a-zA-Z_-]+:.*?##/ { printf " \033[36m%-15s\033[0m %s\n", $$1, $$2 } /^##@/ { printf "\n\033[1m%s\033[0m\n", substr($$0, 5) } ' $(MAKEFILE_LIST) @awk 'BEGIN {FS = ":.*##"; printf "\nUsage:\n make \033[36m<target>\033[0m\n"} /^[a-zA-Z_-]+:.*?##/ { printf " \033[36m%-15s\033[0m %s\n", $$1, $$2 } /^##@/ { printf "\n\033[1m%s\033[0m\n", substr($$0, 5) } ' $(MAKEFILE_LIST)

@ -217,12 +217,32 @@ func ldflags(opts BuildOpts) (string, error) {
return "", err return "", err
} }
commitSha := getGitSha()
if v := os.Getenv("COMMIT_SHA"); v != "" {
commitSha = v
}
var enterpriseCommitSha string
if opts.enterprise {
enterpriseCommitSha = getGitEnterpriseSha()
if v := os.Getenv("ENTERPRISE_COMMIT_SHA"); v != "" {
enterpriseCommitSha = v
}
}
buildBranch := getGitBranch()
if v := os.Getenv("BUILD_BRANCH"); v != "" {
buildBranch = v
}
var b bytes.Buffer var b bytes.Buffer
b.WriteString("-w") b.WriteString("-w")
b.WriteString(fmt.Sprintf(" -X main.version=%s", opts.version)) b.WriteString(fmt.Sprintf(" -X main.version=%s", opts.version))
b.WriteString(fmt.Sprintf(" -X main.commit=%s", getGitSha())) b.WriteString(fmt.Sprintf(" -X main.commit=%s", commitSha))
if enterpriseCommitSha != "" {
b.WriteString(fmt.Sprintf(" -X main.enterpriseCommit=%s", enterpriseCommitSha))
}
b.WriteString(fmt.Sprintf(" -X main.buildstamp=%d", buildStamp)) b.WriteString(fmt.Sprintf(" -X main.buildstamp=%d", buildStamp))
b.WriteString(fmt.Sprintf(" -X main.buildBranch=%s", getGitBranch())) b.WriteString(fmt.Sprintf(" -X main.buildBranch=%s", buildBranch))
if v := os.Getenv("LDFLAGS"); v != "" { if v := os.Getenv("LDFLAGS"); v != "" {
b.WriteString(fmt.Sprintf(" -extldflags \"%s\"", v)) b.WriteString(fmt.Sprintf(" -extldflags \"%s\"", v))
} }

@ -4,12 +4,13 @@ import (
"fmt" "fmt"
"log" "log"
"github.com/urfave/cli/v2"
"github.com/grafana/grafana/pkg/build/compilers" "github.com/grafana/grafana/pkg/build/compilers"
"github.com/grafana/grafana/pkg/build/config" "github.com/grafana/grafana/pkg/build/config"
"github.com/grafana/grafana/pkg/build/errutil" "github.com/grafana/grafana/pkg/build/errutil"
"github.com/grafana/grafana/pkg/build/grafana" "github.com/grafana/grafana/pkg/build/grafana"
"github.com/grafana/grafana/pkg/build/syncutil" "github.com/grafana/grafana/pkg/build/syncutil"
"github.com/urfave/cli/v2"
) )
func BuildBackend(ctx *cli.Context) error { func BuildBackend(ctx *cli.Context) error {

@ -3,10 +3,11 @@ package main
import ( import (
"log" "log"
"github.com/urfave/cli/v2"
"github.com/grafana/grafana/pkg/build/config" "github.com/grafana/grafana/pkg/build/config"
"github.com/grafana/grafana/pkg/build/docker" "github.com/grafana/grafana/pkg/build/docker"
"github.com/grafana/grafana/pkg/build/gcloud" "github.com/grafana/grafana/pkg/build/gcloud"
"github.com/urfave/cli/v2"
) )
func BuildDocker(c *cli.Context) error { func BuildDocker(c *cli.Context) error {

@ -3,11 +3,12 @@ package main
import ( import (
"log" "log"
"github.com/urfave/cli/v2"
"github.com/grafana/grafana/pkg/build/config" "github.com/grafana/grafana/pkg/build/config"
"github.com/grafana/grafana/pkg/build/errutil" "github.com/grafana/grafana/pkg/build/errutil"
"github.com/grafana/grafana/pkg/build/frontend" "github.com/grafana/grafana/pkg/build/frontend"
"github.com/grafana/grafana/pkg/build/syncutil" "github.com/grafana/grafana/pkg/build/syncutil"
"github.com/urfave/cli/v2"
) )
func BuildFrontend(c *cli.Context) error { func BuildFrontend(c *cli.Context) error {

@ -4,11 +4,12 @@ import (
"context" "context"
"log" "log"
"github.com/urfave/cli/v2"
"github.com/grafana/grafana/pkg/build/config" "github.com/grafana/grafana/pkg/build/config"
"github.com/grafana/grafana/pkg/build/errutil" "github.com/grafana/grafana/pkg/build/errutil"
"github.com/grafana/grafana/pkg/build/plugins" "github.com/grafana/grafana/pkg/build/plugins"
"github.com/grafana/grafana/pkg/build/syncutil" "github.com/grafana/grafana/pkg/build/syncutil"
"github.com/urfave/cli/v2"
) )
func BuildInternalPlugins(c *cli.Context) error { func BuildInternalPlugins(c *cli.Context) error {

@ -6,8 +6,9 @@ import (
"os" "os"
"os/exec" "os/exec"
"github.com/grafana/grafana/pkg/build/e2eutil"
"github.com/urfave/cli/v2" "github.com/urfave/cli/v2"
"github.com/grafana/grafana/pkg/build/e2eutil"
) )
func EndToEndTests(c *cli.Context) error { func EndToEndTests(c *cli.Context) error {

@ -6,9 +6,10 @@ import (
"os" "os"
"strconv" "strconv"
"github.com/urfave/cli/v2"
"github.com/grafana/grafana/pkg/build/env" "github.com/grafana/grafana/pkg/build/env"
"github.com/grafana/grafana/pkg/build/git" "github.com/grafana/grafana/pkg/build/git"
"github.com/urfave/cli/v2"
) )
// checkOpts are options used to create a new GitHub check for the enterprise downstream test. // checkOpts are options used to create a new GitHub check for the enterprise downstream test.

@ -4,8 +4,9 @@ import (
"os" "os"
"path/filepath" "path/filepath"
"github.com/grafana/grafana/pkg/build/config"
"github.com/urfave/cli/v2" "github.com/urfave/cli/v2"
"github.com/grafana/grafana/pkg/build/config"
) )
func ExportVersion(c *cli.Context) error { func ExportVersion(c *cli.Context) error {

@ -6,10 +6,11 @@ import (
"os/exec" "os/exec"
"strings" "strings"
"github.com/urfave/cli/v2"
"github.com/grafana/grafana/pkg/build/config" "github.com/grafana/grafana/pkg/build/config"
"github.com/grafana/grafana/pkg/build/docker" "github.com/grafana/grafana/pkg/build/docker"
"github.com/grafana/grafana/pkg/build/gcloud" "github.com/grafana/grafana/pkg/build/gcloud"
"github.com/urfave/cli/v2"
) )
const ( const (

@ -9,7 +9,6 @@ import (
"net/http" "net/http"
"net/url" "net/url"
"os" "os"
"path" "path"
"path/filepath" "path/filepath"
"strings" "strings"
@ -233,7 +232,7 @@ func getSHA256(u string) ([]byte, error) {
return sha256, nil return sha256, nil
} }
func postRequest(cfg packaging.PublishConfig, pth string, obj interface{}, descr string) error { func postRequest(cfg packaging.PublishConfig, pth string, obj any, descr string) error {
var sfx string var sfx string
switch cfg.Edition { switch cfg.Edition {
case config.EditionOSS: case config.EditionOSS:

@ -5,8 +5,9 @@ import (
"os" "os"
"strings" "strings"
"github.com/grafana/grafana/pkg/build/docker"
"github.com/urfave/cli/v2" "github.com/urfave/cli/v2"
"github.com/grafana/grafana/pkg/build/docker"
) )
var additionalCommands []*cli.Command = make([]*cli.Command, 0, 5) var additionalCommands []*cli.Command = make([]*cli.Command, 0, 5)

@ -6,8 +6,9 @@ import (
"os/exec" "os/exec"
"strings" "strings"
"github.com/grafana/grafana/pkg/build/npm"
"github.com/urfave/cli/v2" "github.com/urfave/cli/v2"
"github.com/grafana/grafana/pkg/build/npm"
) )
func NpmRetrieveAction(c *cli.Context) error { func NpmRetrieveAction(c *cli.Context) error {

@ -5,11 +5,12 @@ import (
"log" "log"
"strings" "strings"
"github.com/urfave/cli/v2"
"github.com/grafana/grafana/pkg/build/config" "github.com/grafana/grafana/pkg/build/config"
"github.com/grafana/grafana/pkg/build/gpg" "github.com/grafana/grafana/pkg/build/gpg"
"github.com/grafana/grafana/pkg/build/packaging" "github.com/grafana/grafana/pkg/build/packaging"
"github.com/grafana/grafana/pkg/build/syncutil" "github.com/grafana/grafana/pkg/build/syncutil"
"github.com/urfave/cli/v2"
) )
func Package(c *cli.Context) error { func Package(c *cli.Context) error {

@ -17,8 +17,9 @@ import (
"github.com/aws/aws-sdk-go/service/marketplacecatalog" "github.com/aws/aws-sdk-go/service/marketplacecatalog"
"github.com/docker/docker/api/types" "github.com/docker/docker/api/types"
"github.com/docker/docker/client" "github.com/docker/docker/client"
"github.com/grafana/grafana/pkg/build/config"
"github.com/urfave/cli/v2" "github.com/urfave/cli/v2"
"github.com/grafana/grafana/pkg/build/config"
) )
const ( const (

@ -9,9 +9,10 @@ import (
"strings" "strings"
"github.com/google/go-github/github" "github.com/google/go-github/github"
"github.com/grafana/grafana/pkg/build/config"
"github.com/urfave/cli/v2" "github.com/urfave/cli/v2"
"golang.org/x/oauth2" "golang.org/x/oauth2"
"github.com/grafana/grafana/pkg/build/config"
) )
type githubRepositoryService interface { type githubRepositoryService interface {

@ -6,10 +6,11 @@ import (
"os" "os"
"os/exec" "os/exec"
"github.com/urfave/cli/v2"
"github.com/grafana/grafana/pkg/build/config" "github.com/grafana/grafana/pkg/build/config"
"github.com/grafana/grafana/pkg/build/docker" "github.com/grafana/grafana/pkg/build/docker"
"github.com/grafana/grafana/pkg/build/gcloud" "github.com/grafana/grafana/pkg/build/gcloud"
"github.com/urfave/cli/v2"
) )
func Enterprise2(c *cli.Context) error { func Enterprise2(c *cli.Context) error {

@ -4,9 +4,10 @@ import (
"log" "log"
"path/filepath" "path/filepath"
"github.com/urfave/cli/v2"
"github.com/grafana/grafana/pkg/build/config" "github.com/grafana/grafana/pkg/build/config"
"github.com/grafana/grafana/pkg/build/gcloud/storage" "github.com/grafana/grafana/pkg/build/gcloud/storage"
"github.com/urfave/cli/v2"
) )
// StoreStorybook implements the sub-command "store-storybook". // StoreStorybook implements the sub-command "store-storybook".

@ -6,9 +6,10 @@ import (
"os" "os"
"path/filepath" "path/filepath"
"github.com/urfave/cli/v2"
"github.com/grafana/grafana/pkg/build/config" "github.com/grafana/grafana/pkg/build/config"
"github.com/grafana/grafana/pkg/build/gcloud/storage" "github.com/grafana/grafana/pkg/build/gcloud/storage"
"github.com/urfave/cli/v2"
) )
// UploadCDN implements the sub-command "upload-cdn". // UploadCDN implements the sub-command "upload-cdn".

@ -9,11 +9,12 @@ import (
"path/filepath" "path/filepath"
"strings" "strings"
"github.com/urfave/cli/v2"
"github.com/grafana/grafana/pkg/build/config" "github.com/grafana/grafana/pkg/build/config"
"github.com/grafana/grafana/pkg/build/droneutil" "github.com/grafana/grafana/pkg/build/droneutil"
"github.com/grafana/grafana/pkg/build/gcloud" "github.com/grafana/grafana/pkg/build/gcloud"
"github.com/grafana/grafana/pkg/build/packaging" "github.com/grafana/grafana/pkg/build/packaging"
"github.com/urfave/cli/v2"
) )
const releaseFolder = "release" const releaseFolder = "release"

@ -5,9 +5,10 @@ import (
"fmt" "fmt"
"testing" "testing"
"github.com/grafana/grafana/pkg/build/config"
"github.com/stretchr/testify/assert" "github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require" "github.com/stretchr/testify/require"
"github.com/grafana/grafana/pkg/build/config"
) )
func Test_getVersionFolder(t *testing.T) { func Test_getVersionFolder(t *testing.T) {

@ -10,9 +10,7 @@ import (
"github.com/drone/drone-cli/drone/lint" "github.com/drone/drone-cli/drone/lint"
"github.com/drone/drone-cli/drone/starlark" "github.com/drone/drone-cli/drone/starlark"
"github.com/google/go-cmp/cmp" "github.com/google/go-cmp/cmp"
cliv1 "github.com/urfave/cli" cliv1 "github.com/urfave/cli"
"github.com/urfave/cli/v2" "github.com/urfave/cli/v2"
"gopkg.in/yaml.v3" "gopkg.in/yaml.v3"
@ -70,7 +68,7 @@ func VerifyDrone(c *cli.Context) error {
return nil return nil
} }
func readConfig(fpath string) ([]map[string]interface{}, error) { func readConfig(fpath string) ([]map[string]any, error) {
//nolint:gosec //nolint:gosec
f, err := os.Open(fpath) f, err := os.Open(fpath)
if err != nil { if err != nil {
@ -84,9 +82,9 @@ func readConfig(fpath string) ([]map[string]interface{}, error) {
// The YAML stream may contain multiple pipeline configurations, read them all // The YAML stream may contain multiple pipeline configurations, read them all
dec := yaml.NewDecoder(f) dec := yaml.NewDecoder(f)
var c []map[string]interface{} var c []map[string]any
for { for {
var m map[string]interface{} var m map[string]any
if err := dec.Decode(&m); err != nil { if err := dec.Decode(&m); err != nil {
if errors.Is(err, io.EOF) { if errors.Is(err, io.EOF) {
break break

@ -5,8 +5,9 @@ import (
"os" "os"
"strings" "strings"
"github.com/grafana/grafana/pkg/build/droneutil"
"github.com/urfave/cli/v2" "github.com/urfave/cli/v2"
"github.com/grafana/grafana/pkg/build/droneutil"
) )
func GenerateMetadata(c *cli.Context) (Metadata, error) { func GenerateMetadata(c *cli.Context) (Metadata, error) {

@ -72,10 +72,8 @@ func setUpEnv(t *testing.T, envMap map[string]string) {
t.Helper() t.Helper()
os.Clearenv() os.Clearenv()
err := os.Setenv("DRONE_COMMIT", "abcd12345") t.Setenv("DRONE_COMMIT", "abcd12345")
require.NoError(t, err)
for k, v := range envMap { for k, v := range envMap {
err := os.Setenv(k, v) t.Setenv(k, v)
require.NoError(t, err)
} }
} }

@ -3,6 +3,7 @@ package config
import ( import (
"context" "context"
"fmt" "fmt"
"log"
"strconv" "strconv"
"time" "time"
@ -12,6 +13,7 @@ import (
type Revision struct { type Revision struct {
Timestamp int64 Timestamp int64
SHA256 string SHA256 string
EnterpriseCommit string
Branch string Branch string
} }
@ -42,6 +44,17 @@ func GrafanaRevision(ctx context.Context, grafanaDir string) (Revision, error) {
return Revision{}, err return Revision{}, err
} }
enterpriseCommit, err := executil.OutputAt(ctx, grafanaDir, "git", "-C", "../grafana-enterprise", "rev-parse", "--short", "HEAD")
if err != nil {
enterpriseCommit, err = executil.OutputAt(ctx, grafanaDir, "git", "-C", "..", "rev-parse", "--short", "HEAD")
if err != nil {
enterpriseCommit, err = executil.OutputAt(ctx, grafanaDir, "git", "-C", "/tmp/grafana-enterprise", "rev-parse", "--short", "HEAD")
if err != nil {
log.Println("Could not get enterprise commit. Error:", err)
}
}
}
branch, err := executil.OutputAt(ctx, grafanaDir, "git", "rev-parse", "--abbrev-ref", "HEAD") branch, err := executil.OutputAt(ctx, grafanaDir, "git", "rev-parse", "--abbrev-ref", "HEAD")
if err != nil { if err != nil {
return Revision{}, err return Revision{}, err
@ -49,6 +62,7 @@ func GrafanaRevision(ctx context.Context, grafanaDir string) (Revision, error) {
return Revision{ return Revision{
SHA256: sha, SHA256: sha,
EnterpriseCommit: enterpriseCommit,
Branch: branch, Branch: branch,
Timestamp: stamp, Timestamp: stamp,
}, nil }, nil

@ -94,7 +94,7 @@ func GetPackageJSONVersion(grafanaDir string) (string, error) {
if err != nil { if err != nil {
return "", fmt.Errorf("failed to read %q: %w", pkgJSONPath, err) return "", fmt.Errorf("failed to read %q: %w", pkgJSONPath, err)
} }
pkgObj := map[string]interface{}{} pkgObj := map[string]any{}
if err := json.Unmarshal(pkgJSONB, &pkgObj); err != nil { if err := json.Unmarshal(pkgJSONB, &pkgObj); err != nil {
return "", fmt.Errorf("failed decoding %q: %w", pkgJSONPath, err) return "", fmt.Errorf("failed decoding %q: %w", pkgJSONPath, err)
} }

@ -1,7 +1,5 @@
package config package config
const PublicBucket = "grafana-downloads"
var Versions = VersionMap{ var Versions = VersionMap{
PullRequestMode: { PullRequestMode: {
Variants: []Variant{ Variants: []Variant{
@ -9,8 +7,9 @@ var Versions = VersionMap{
VariantLinuxAmd64Musl, VariantLinuxAmd64Musl,
VariantDarwinAmd64, VariantDarwinAmd64,
VariantWindowsAmd64, VariantWindowsAmd64,
VariantArm64, // https://github.com/golang/go/issues/58425 disabling arm builds until go issue is resolved
VariantArm64Musl, // VariantArm64,
// VariantArm64Musl,
}, },
PluginSignature: PluginSignature{ PluginSignature: PluginSignature{
Sign: false, Sign: false,
@ -29,9 +28,10 @@ var Versions = VersionMap{
}, },
MainMode: { MainMode: {
Variants: []Variant{ Variants: []Variant{
VariantArmV6, // https://github.com/golang/go/issues/58425 disabling arm builds until go issue is resolved
VariantArmV7, // VariantArmV6,
VariantArmV7Musl, // VariantArmV7,
// VariantArmV7Musl,
VariantArm64, VariantArm64,
VariantArm64Musl, VariantArm64Musl,
VariantDarwinAmd64, VariantDarwinAmd64,
@ -48,7 +48,8 @@ var Versions = VersionMap{
Architectures: []Architecture{ Architectures: []Architecture{
ArchAMD64, ArchAMD64,
ArchARM64, ArchARM64,
ArchARMv7, // GOARCH=ARM is used for both armv6 and armv7. They are differentiated by the GOARM variable. // https://github.com/golang/go/issues/58425 disabling arm builds until go issue is resolved
// ArchARMv7, // GOARCH=ARM is used for both armv6 and armv7. They are differentiated by the GOARM variable.
}, },
Distribution: []Distribution{ Distribution: []Distribution{
Alpine, Alpine,
@ -64,9 +65,10 @@ var Versions = VersionMap{
}, },
DownstreamMode: { DownstreamMode: {
Variants: []Variant{ Variants: []Variant{
VariantArmV6, // https://github.com/golang/go/issues/58425 disabling arm builds until go issue is resolved
VariantArmV7, // VariantArmV6,
VariantArmV7Musl, //VariantArmV7,
// VariantArmV7Musl,
VariantArm64, VariantArm64,
VariantArm64Musl, VariantArm64Musl,
VariantDarwinAmd64, VariantDarwinAmd64,
@ -83,7 +85,8 @@ var Versions = VersionMap{
Architectures: []Architecture{ Architectures: []Architecture{
ArchAMD64, ArchAMD64,
ArchARM64, ArchARM64,
ArchARMv7, // GOARCH=ARM is used for both armv6 and armv7. They are differentiated by the GOARM variable. // https://github.com/golang/go/issues/58425 disabling arm builds until go issue is resolved
// ArchARMv7, // GOARCH=ARM is used for both armv6 and armv7. They are differentiated by the GOARM variable.
}, },
Distribution: []Distribution{ Distribution: []Distribution{
Alpine, Alpine,
@ -171,9 +174,10 @@ var Versions = VersionMap{
}, },
Enterprise2Mode: { Enterprise2Mode: {
Variants: []Variant{ Variants: []Variant{
VariantArmV6, // https://github.com/golang/go/issues/58425 disabling arm builds until go issue is resolved
VariantArmV7, // VariantArmV6,
VariantArmV7Musl, // VariantArmV7,
// VariantArmV7Musl,
VariantArm64, VariantArm64,
VariantArm64Musl, VariantArm64Musl,
VariantDarwinAmd64, VariantDarwinAmd64,
@ -190,7 +194,8 @@ var Versions = VersionMap{
Architectures: []Architecture{ Architectures: []Architecture{
ArchAMD64, ArchAMD64,
ArchARM64, ArchARM64,
ArchARMv7, // https://github.com/golang/go/issues/58425 disabling arm builds until go issue is resolved
// ArchARMv7,
}, },
Distribution: []Distribution{ Distribution: []Distribution{
Alpine, Alpine,

@ -74,7 +74,7 @@ func BuildImage(version string, arch config.Architecture, grafanaDir string, use
tagSuffix := "" tagSuffix := ""
if useUbuntu { if useUbuntu {
libc = "" libc = ""
baseImage = fmt.Sprintf("%subuntu:20.04", baseArch) baseImage = fmt.Sprintf("%subuntu:22.04", baseArch)
tagSuffix = "-ubuntu" tagSuffix = "-ubuntu"
} }

@ -3,8 +3,9 @@ package droneutil_test
import ( import (
"testing" "testing"
"github.com/grafana/grafana/pkg/build/droneutil"
"github.com/stretchr/testify/require" "github.com/stretchr/testify/require"
"github.com/grafana/grafana/pkg/build/droneutil"
) )
func TestGetDroneEvent(t *testing.T) { func TestGetDroneEvent(t *testing.T) {

@ -15,6 +15,11 @@ const (
flag2 = "flag2" flag2 = "flag2"
) )
type flagObj struct {
name string
value string
}
func TestRequireListWithEnvFallback(t *testing.T) { func TestRequireListWithEnvFallback(t *testing.T) {
var app = cli.NewApp() var app = cli.NewApp()
tests := []struct { tests := []struct {
@ -73,7 +78,7 @@ func TestRequireStringWithEnvFallback(t *testing.T) {
}{ }{
{ {
testName: "string present in the context", testName: "string present in the context",
ctx: cli.NewContext(app, setFlags(t, flag1, flag2, flag.NewFlagSet("test", flag.ContinueOnError)), nil), ctx: cli.NewContext(app, setFlags(t, flag.NewFlagSet("test", flag.ContinueOnError), flagObj{name: flag1, value: "a"}), nil),
name: flag1, name: flag1,
envName: "", envName: "",
expected: "a", expected: "a",
@ -81,7 +86,7 @@ func TestRequireStringWithEnvFallback(t *testing.T) {
}, },
{ {
testName: "string present in env", testName: "string present in env",
ctx: cli.NewContext(app, setFlags(t, "", "", flag.NewFlagSet("test", flag.ContinueOnError)), nil), ctx: cli.NewContext(app, setFlags(t, flag.NewFlagSet("test", flag.ContinueOnError)), nil),
name: flag1, name: flag1,
envName: setEnv(t, flag1, "a"), envName: setEnv(t, flag1, "a"),
expected: "a", expected: "a",
@ -89,7 +94,7 @@ func TestRequireStringWithEnvFallback(t *testing.T) {
}, },
{ {
testName: "string absent from both context and env", testName: "string absent from both context and env",
ctx: cli.NewContext(app, setFlags(t, "", flag2, flag.NewFlagSet("test", flag.ContinueOnError)), nil), ctx: cli.NewContext(app, setFlags(t, flag.NewFlagSet("test", flag.ContinueOnError), flagObj{name: flag2, value: "b"}), nil),
name: flag1, name: flag1,
envName: "", envName: "",
expected: "", expected: "",
@ -120,13 +125,12 @@ func applyFlagSet(t *testing.T, aFlag, aValue string) *flag.FlagSet {
return set return set
} }
func setFlags(t *testing.T, flag1, flag2 string, flagSet *flag.FlagSet) *flag.FlagSet { func setFlags(t *testing.T, flagSet *flag.FlagSet, flags ...flagObj) *flag.FlagSet {
t.Helper() t.Helper()
if flag1 != "" { for _, f := range flags {
flagSet.StringVar(&flag1, "flag1", "a", "") if f.name != "" {
flagSet.StringVar(&f.name, f.name, f.value, "")
} }
if flag2 != "" {
flagSet.StringVar(&flag2, "flag2", "b", "")
} }
return flagSet return flagSet
} }
@ -135,9 +139,6 @@ func setEnv(t *testing.T, key, value string) string {
t.Helper() t.Helper()
os.Clearenv() os.Clearenv()
err := os.Setenv(key, value) t.Setenv(key, value)
if err != nil {
require.NoError(t, err)
}
return key return key
} }

@ -3,9 +3,9 @@ package env_test
import ( import (
"testing" "testing"
"github.com/grafana/grafana/pkg/build/env"
"github.com/stretchr/testify/require" "github.com/stretchr/testify/require"
"github.com/grafana/grafana/pkg/build/env"
) )
func TestLookup(t *testing.T) { func TestLookup(t *testing.T) {

@ -21,6 +21,11 @@ type packageJson struct {
Version string `json:"version"` Version string `json:"version"`
} }
type flagObj struct {
name string
value string
}
var app = cli.NewApp() var app = cli.NewApp()
func TestGetConfig(t *testing.T) { func TestGetConfig(t *testing.T) {
@ -32,35 +37,35 @@ func TestGetConfig(t *testing.T) {
wantErr bool wantErr bool
}{ }{
{ {
ctx: cli.NewContext(app, setFlags(t, jobs, githubToken, "", flag.NewFlagSet("flagSet", flag.ContinueOnError)), nil), ctx: cli.NewContext(app, setFlags(t, flag.NewFlagSet("flagSet", flag.ContinueOnError), flagObj{name: jobs, value: "2"}, flagObj{name: githubToken, value: "token"}), nil),
name: "package.json matches tag", name: "package.json matches tag",
packageJsonVersion: "10.0.0", packageJsonVersion: "10.0.0",
metadata: config.Metadata{GrafanaVersion: "10.0.0", ReleaseMode: config.ReleaseMode{Mode: config.TagMode}}, metadata: config.Metadata{GrafanaVersion: "10.0.0", ReleaseMode: config.ReleaseMode{Mode: config.TagMode}},
wantErr: false, wantErr: false,
}, },
{ {
ctx: cli.NewContext(app, setFlags(t, jobs, githubToken, "", flag.NewFlagSet("flagSet", flag.ContinueOnError)), nil), ctx: cli.NewContext(app, setFlags(t, flag.NewFlagSet("flagSet", flag.ContinueOnError), flagObj{name: jobs, value: "2"}, flagObj{name: githubToken, value: "token"}), nil),
name: "custom tag, package.json doesn't match", name: "custom tag, package.json doesn't match",
packageJsonVersion: "10.0.0", packageJsonVersion: "10.0.0",
metadata: config.Metadata{GrafanaVersion: "10.0.0-abcd123pre", ReleaseMode: config.ReleaseMode{Mode: config.TagMode}}, metadata: config.Metadata{GrafanaVersion: "10.0.0-abcd123pre", ReleaseMode: config.ReleaseMode{Mode: config.TagMode}},
wantErr: false, wantErr: false,
}, },
{ {
ctx: cli.NewContext(app, setFlags(t, jobs, githubToken, "", flag.NewFlagSet("flagSet", flag.ContinueOnError)), nil), ctx: cli.NewContext(app, setFlags(t, flag.NewFlagSet("flagSet", flag.ContinueOnError), flagObj{name: jobs, value: "2"}, flagObj{name: githubToken, value: "token"}), nil),
name: "package.json doesn't match tag", name: "package.json doesn't match tag",
packageJsonVersion: "10.1.0", packageJsonVersion: "10.1.0",
metadata: config.Metadata{GrafanaVersion: "10.0.0", ReleaseMode: config.ReleaseMode{Mode: config.TagMode}}, metadata: config.Metadata{GrafanaVersion: "10.0.0", ReleaseMode: config.ReleaseMode{Mode: config.TagMode}},
wantErr: true, wantErr: true,
}, },
{ {
ctx: cli.NewContext(app, setFlags(t, jobs, githubToken, "", flag.NewFlagSet("flagSet", flag.ContinueOnError)), nil), ctx: cli.NewContext(app, setFlags(t, flag.NewFlagSet("flagSet", flag.ContinueOnError), flagObj{name: jobs, value: "2"}, flagObj{name: githubToken, value: "token"}), nil),
name: "test tag event, check should be skipped", name: "test tag event, check should be skipped",
packageJsonVersion: "10.1.0", packageJsonVersion: "10.1.0",
metadata: config.Metadata{GrafanaVersion: "10.1.0-test", ReleaseMode: config.ReleaseMode{Mode: config.TagMode, IsTest: true}}, metadata: config.Metadata{GrafanaVersion: "10.1.0-test", ReleaseMode: config.ReleaseMode{Mode: config.TagMode, IsTest: true}},
wantErr: false, wantErr: false,
}, },
{ {
ctx: cli.NewContext(app, setFlags(t, jobs, githubToken, buildID, flag.NewFlagSet("flagSet", flag.ContinueOnError)), nil), ctx: cli.NewContext(app, setFlags(t, flag.NewFlagSet("flagSet", flag.ContinueOnError), flagObj{name: jobs, value: "2"}, flagObj{name: githubToken, value: "token"}, flagObj{name: buildID, value: "12345"}), nil),
name: "non-tag event", name: "non-tag event",
packageJsonVersion: "10.1.0-pre", packageJsonVersion: "10.1.0-pre",
metadata: config.Metadata{GrafanaVersion: "10.1.0-12345pre", ReleaseMode: config.ReleaseMode{Mode: config.PullRequestMode}}, metadata: config.Metadata{GrafanaVersion: "10.1.0-12345pre", ReleaseMode: config.ReleaseMode{Mode: config.PullRequestMode}},
@ -85,16 +90,12 @@ func TestGetConfig(t *testing.T) {
} }
} }
func setFlags(t *testing.T, flag1, flag2, flag3 string, flagSet *flag.FlagSet) *flag.FlagSet { func setFlags(t *testing.T, flagSet *flag.FlagSet, flags ...flagObj) *flag.FlagSet {
t.Helper() t.Helper()
if flag1 != "" { for _, f := range flags {
flagSet.StringVar(&flag1, jobs, "2", "") if f.name != "" {
} flagSet.StringVar(&f.name, f.name, f.value, "")
if flag2 != "" {
flagSet.StringVar(&flag2, githubToken, "token", "")
} }
if flag3 != "" {
flagSet.StringVar(&flag3, buildID, "12345", "")
} }
return flagSet return flagSet
} }

@ -5,9 +5,10 @@ import (
"runtime" "runtime"
"testing" "testing"
"github.com/grafana/grafana/pkg/build/fsutil"
"github.com/stretchr/testify/assert" "github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require" "github.com/stretchr/testify/require"
"github.com/grafana/grafana/pkg/build/fsutil"
) )
func TestCopyFile(t *testing.T) { func TestCopyFile(t *testing.T) {

@ -3,8 +3,9 @@ package fsutil_test
import ( import (
"testing" "testing"
"github.com/grafana/grafana/pkg/build/fsutil"
"github.com/stretchr/testify/require" "github.com/stretchr/testify/require"
"github.com/grafana/grafana/pkg/build/fsutil"
) )
func TestExists_NonExistent(t *testing.T) { func TestExists_NonExistent(t *testing.T) {

@ -16,10 +16,11 @@ import (
"time" "time"
"cloud.google.com/go/storage" "cloud.google.com/go/storage"
"github.com/grafana/grafana/pkg/build/fsutil"
"github.com/grafana/grafana/pkg/build/gcloud"
"google.golang.org/api/iterator" "google.golang.org/api/iterator"
"google.golang.org/api/option" "google.golang.org/api/option"
"github.com/grafana/grafana/pkg/build/fsutil"
"github.com/grafana/grafana/pkg/build/gcloud"
) )
var ( var (
@ -388,11 +389,17 @@ func GetLatestMainBuild(ctx context.Context, bucket *storage.BucketHandle, path
return "", ErrorNilBucket return "", ErrorNilBucket
} }
it := bucket.Objects(ctx, &storage.Query{ query := &storage.Query{
Prefix: path, Prefix: path,
}) }
err := query.SetAttrSelection([]string{"Name", "Generation"})
if err != nil {
return "", fmt.Errorf("failed to set attribute selector, err: %q", err)
}
it := bucket.Objects(ctx, query)
var files []string var files []string
var oldGeneration int64
for { for {
attrs, err := it.Next() attrs, err := it.Next()
if errors.Is(err, iterator.Done) { if errors.Is(err, iterator.Done) {
@ -401,13 +408,17 @@ func GetLatestMainBuild(ctx context.Context, bucket *storage.BucketHandle, path
if err != nil { if err != nil {
return "", fmt.Errorf("failed to iterate through bucket, err: %w", err) return "", fmt.Errorf("failed to iterate through bucket, err: %w", err)
} }
if attrs.Generation >= oldGeneration {
files = append([]string{attrs.Name}, files...)
oldGeneration = attrs.Generation
} else {
files = append(files, attrs.Name) files = append(files, attrs.Name)
} }
}
var latestVersion string var latestVersion string
for i := len(files) - 1; i >= 0; i-- { for i := 0; i < len(files); i++ {
captureVersion := regexp.MustCompile(`(\d+\.\d+\.\d+-\d+pre)`) captureVersion := regexp.MustCompile(`(\d+\.\d+\.\d+-\d+)`)
if captureVersion.MatchString(files[i]) { if captureVersion.MatchString(files[i]) {
latestVersion = captureVersion.FindString(files[i]) latestVersion = captureVersion.FindString(files[i])
break break

@ -15,3 +15,16 @@ func getGitSha() string {
} }
return string(v) return string(v)
} }
func getGitEnterpriseSha() string {
// supporting the old way of dev setup
v, err := runError("git", "-C", "../grafana-enterprise", "rev-parse", "--short", "HEAD")
if err != nil {
// supporting the new way of dev setup
v, err = runError("git", "-C", "..", "rev-parse", "--short", "HEAD")
if err != nil {
return ""
}
}
return string(v)
}

@ -8,8 +8,9 @@ import (
"regexp" "regexp"
"github.com/google/go-github/v45/github" "github.com/google/go-github/v45/github"
"github.com/grafana/grafana/pkg/build/stringutil"
"golang.org/x/oauth2" "golang.org/x/oauth2"
"github.com/grafana/grafana/pkg/build/stringutil"
) )
const ( const (

@ -6,8 +6,9 @@ import (
"testing" "testing"
"github.com/google/go-github/v45/github" "github.com/google/go-github/v45/github"
"github.com/grafana/grafana/pkg/build/git"
"github.com/stretchr/testify/require" "github.com/stretchr/testify/require"
"github.com/grafana/grafana/pkg/build/git"
) )
type TestChecksService struct { type TestChecksService struct {

@ -6,8 +6,9 @@ import (
"testing" "testing"
"github.com/google/go-github/v45/github" "github.com/google/go-github/v45/github"
"github.com/grafana/grafana/pkg/build/git"
"github.com/stretchr/testify/require" "github.com/stretchr/testify/require"
"github.com/grafana/grafana/pkg/build/git"
) )
type TestLabelsService struct { type TestLabelsService struct {

@ -3,8 +3,9 @@ package git_test
import ( import (
"testing" "testing"
"github.com/grafana/grafana/pkg/build/git"
"github.com/stretchr/testify/assert" "github.com/stretchr/testify/assert"
"github.com/grafana/grafana/pkg/build/git"
) )
func TestPRCheckRegexp(t *testing.T) { func TestPRCheckRegexp(t *testing.T) {

@ -23,13 +23,19 @@ const (
) )
func GrafanaLDFlags(version string, r config.Revision) []string { func GrafanaLDFlags(version string, r config.Revision) []string {
return []string{ cmd := []string{
"-w", "-w",
fmt.Sprintf("-X main.version=%s", version), fmt.Sprintf("-X main.version=%s", version),
fmt.Sprintf("-X main.commit=%s", r.SHA256), fmt.Sprintf("-X main.commit=%s", r.SHA256),
fmt.Sprintf("-X main.buildstamp=%d", r.Timestamp), fmt.Sprintf("-X main.buildstamp=%d", r.Timestamp),
fmt.Sprintf("-X main.buildBranch=%s", r.Branch), fmt.Sprintf("-X main.buildBranch=%s", r.Branch),
} }
if r.EnterpriseCommit != "" {
cmd = append(cmd, fmt.Sprintf("-X main.enterpriseCommit=%s", r.EnterpriseCommit))
}
return cmd
} }
// BinaryFolder returns the path to where the Grafana binary is build given the provided arguments. // BinaryFolder returns the path to where the Grafana binary is build given the provided arguments.
@ -99,7 +105,7 @@ func BuildGrafanaBinary(ctx context.Context, name, version string, args BuildArg
descriptor := GrafanaDescriptor(opts) descriptor := GrafanaDescriptor(opts)
log.Printf("Building %q for %s\nwith env: %v", binary, descriptor, opts.Env()) log.Printf("Building %q for %s", binary, descriptor)
opts.LdFlags = append(args.LdFlags, GrafanaLDFlags(version, revision)...) opts.LdFlags = append(args.LdFlags, GrafanaLDFlags(version, revision)...)

@ -47,7 +47,7 @@ func GetLernaVersion(grafanaDir string) (string, error) {
if err != nil { if err != nil {
return "", fmt.Errorf("failed to read %q: %w", lernaJSONPath, err) return "", fmt.Errorf("failed to read %q: %w", lernaJSONPath, err)
} }
pkgObj := map[string]interface{}{} pkgObj := map[string]any{}
if err := json.Unmarshal(lernaJSONB, &pkgObj); err != nil { if err := json.Unmarshal(lernaJSONB, &pkgObj); err != nil {
return "", fmt.Errorf("failed decoding %q: %w", lernaJSONPath, err) return "", fmt.Errorf("failed decoding %q: %w", lernaJSONPath, err)
} }

@ -5,10 +5,9 @@ import (
"encoding/json" "encoding/json"
"fmt" "fmt"
"log" "log"
"net/http"
"strconv" "strconv"
"time" "time"
"net/http"
) )
type payload struct { type payload struct {

@ -21,11 +21,11 @@ const NpmArtifactDir = "./npm-artifacts"
var packages = []string{ var packages = []string{
"@grafana/ui", "@grafana/ui",
"@grafana/data", "@grafana/data",
"@grafana/toolkit",
"@grafana/runtime", "@grafana/runtime",
"@grafana/e2e", "@grafana/e2e",
"@grafana/e2e-selectors", "@grafana/e2e-selectors",
"@grafana/schema", "@grafana/schema",
"@grafana/flamegraph",
} }
// PublishNpmPackages will publish local NPM packages to NPM registry. // PublishNpmPackages will publish local NPM packages to NPM registry.
@ -117,11 +117,11 @@ func FetchNpmPackages(ctx context.Context, tag, bucketName string) error {
// Latest and next is 9.1.6. // Latest and next is 9.1.6.
// 9.2.0-beta1 is released, the latest should stay on 9.1.6, next should point to 9.2.0-beta1 // 9.2.0-beta1 is released, the latest should stay on 9.1.6, next should point to 9.2.0-beta1
// No move of dist-tags // No move of dist-tags
// 9.1.7 is relased, the latest should point to 9.1.7, next should stay to 9.2.0-beta1 // 9.1.7 is released, the latest should point to 9.1.7, next should stay to 9.2.0-beta1
// No move of dist-tags // No move of dist-tags
// Next week 9.2.0-beta2 is released, the latest should point to 9.1.7, next should point to 9.2.0-beta2 // Next week 9.2.0-beta2 is released, the latest should point to 9.1.7, next should point to 9.2.0-beta2
// No move of dist-tags // No move of dist-tags
// In two weeks 9.2.0 stable is relased, the latest and next should point to 9.2.0. // In two weeks 9.2.0 stable is released, the latest and next should point to 9.2.0.
// The next dist-tag is moved to point to 9.2.0. // The next dist-tag is moved to point to 9.2.0.
// //
// 3. Releasing an older stable than the current stable // 3. Releasing an older stable than the current stable

@ -381,6 +381,9 @@ func executeFPM(options linuxPackageOptions, packageRoot, srcDir string) error {
"--vendor", vendor, "--vendor", vendor,
"-a", string(options.packageArch), "-a", string(options.packageArch),
} }
if options.prermSrc != "" {
args = append(args, "--before-remove", options.prermSrc)
}
if options.edition == config.EditionEnterprise || options.edition == config.EditionEnterprise2 || options.goArch == config.ArchARMv6 { if options.edition == config.EditionEnterprise || options.edition == config.EditionEnterprise2 || options.goArch == config.ArchARMv6 {
args = append(args, "--conflicts", "grafana") args = append(args, "--conflicts", "grafana")
} }
@ -530,7 +533,7 @@ func copyPlugins(ctx context.Context, v config.Variant, grafanaDir, tmpDir strin
if err != nil { if err != nil {
return fmt.Errorf("failed to read %q: %w", filepath.Join(srcDir, "plugin.json"), err) return fmt.Errorf("failed to read %q: %w", filepath.Join(srcDir, "plugin.json"), err)
} }
var plugJSON map[string]interface{} var plugJSON map[string]any
if err := json.Unmarshal(jsonB, &plugJSON); err != nil { if err := json.Unmarshal(jsonB, &plugJSON); err != nil {
return err return err
} }
@ -729,6 +732,7 @@ func realPackageVariant(ctx context.Context, v config.Variant, edition config.Ed
initdScriptFilePath: "/etc/init.d/grafana-server", initdScriptFilePath: "/etc/init.d/grafana-server",
systemdServiceFilePath: "/usr/lib/systemd/system/grafana-server.service", systemdServiceFilePath: "/usr/lib/systemd/system/grafana-server.service",
postinstSrc: filepath.Join(grafanaDir, "packaging", "deb", "control", "postinst"), postinstSrc: filepath.Join(grafanaDir, "packaging", "deb", "control", "postinst"),
prermSrc: filepath.Join(grafanaDir, "packaging", "deb", "control", "prerm"),
initdScriptSrc: filepath.Join(grafanaDir, "packaging", "deb", "init.d", "grafana-server"), initdScriptSrc: filepath.Join(grafanaDir, "packaging", "deb", "init.d", "grafana-server"),
defaultFileSrc: filepath.Join(grafanaDir, "packaging", "deb", "default", "grafana-server"), defaultFileSrc: filepath.Join(grafanaDir, "packaging", "deb", "default", "grafana-server"),
systemdFileSrc: filepath.Join(grafanaDir, "packaging", "deb", "systemd", "grafana-server.service"), systemdFileSrc: filepath.Join(grafanaDir, "packaging", "deb", "systemd", "grafana-server.service"),
@ -767,8 +771,7 @@ func realPackageVariant(ctx context.Context, v config.Variant, edition config.Ed
defaultFileSrc: filepath.Join(grafanaDir, "packaging", "rpm", "sysconfig", "grafana-server"), defaultFileSrc: filepath.Join(grafanaDir, "packaging", "rpm", "sysconfig", "grafana-server"),
systemdFileSrc: filepath.Join(grafanaDir, "packaging", "rpm", "systemd", "grafana-server.service"), systemdFileSrc: filepath.Join(grafanaDir, "packaging", "rpm", "systemd", "grafana-server.service"),
wrapperFilePath: filepath.Join(grafanaDir, "packaging", "wrappers"), wrapperFilePath: filepath.Join(grafanaDir, "packaging", "wrappers"),
// chkconfig is depended on since our systemd service wraps a SysV init script, and that requires chkconfig depends: []string{"/sbin/service", "fontconfig", "freetype"},
depends: []string{"/sbin/service", "chkconfig", "fontconfig", "freetype"},
}); err != nil { }); err != nil {
return err return err
} }
@ -845,6 +848,7 @@ type linuxPackageOptions struct {
initdScriptFilePath string initdScriptFilePath string
systemdServiceFilePath string systemdServiceFilePath string
postinstSrc string postinstSrc string
prermSrc string
initdScriptSrc string initdScriptSrc string
defaultFileSrc string defaultFileSrc string
systemdFileSrc string systemdFileSrc string

@ -3,9 +3,10 @@ package packaging_test
import ( import (
"testing" "testing"
"github.com/stretchr/testify/assert"
"github.com/grafana/grafana/pkg/build/config" "github.com/grafana/grafana/pkg/build/config"
"github.com/grafana/grafana/pkg/build/packaging" "github.com/grafana/grafana/pkg/build/packaging"
"github.com/stretchr/testify/assert"
) )
func TestPackageRegexp(t *testing.T) { func TestPackageRegexp(t *testing.T) {

@ -4,6 +4,7 @@ import (
"encoding/json" "encoding/json"
"fmt" "fmt"
"os" "os"
"path/filepath"
"strings" "strings"
"time" "time"
) )
@ -14,7 +15,7 @@ type PackageJSON struct {
// Opens the package.json file in the provided directory and returns a struct that represents its contents // Opens the package.json file in the provided directory and returns a struct that represents its contents
func OpenPackageJSON(dir string) (PackageJSON, error) { func OpenPackageJSON(dir string) (PackageJSON, error) {
reader, err := os.Open("package.json") reader, err := os.Open(filepath.Clean(dir + "/package.json"))
if err != nil { if err != nil {
return PackageJSON{}, err return PackageJSON{}, err
} }

@ -1,628 +0,0 @@
events/release.star,6652
ver_mode =ver_mode64,1602
release_trigger =release_trigger65,1623
def store_npm_packages_step():store_npm_packages_step74,1752
def retrieve_npm_packages_step():retrieve_npm_packages_step90,2193
def release_npm_packages_step():release_npm_packages_step107,2663
def oss_pipelines(ver_mode = ver_mode, trigger = release_trigger):oss_pipelines123,3076
environment =environment135,3492
edition =edition136,3529
services =services137,3549
volumes =volumes138,3609
package_steps =package_steps139,3659
publish_steps =publish_steps140,3682
should_publish =should_publish141,3705
should_upload =should_upload142,3748
init_steps =init_steps143,3818
build_steps =build_steps152,4033
integration_test_steps =integration_test_steps159,4342
build_storybook =build_storybook182,5254
publish_step =publish_step190,5674
store_npm_step =store_npm_step191,5758
windows_package_steps =windows_package_steps196,5957
windows_pipeline =windows_pipeline198,6044
name =name199,6077
edition =edition200,6127
trigger =trigger201,6154
steps =steps202,6181
platform =platform203,6256
depends_on =depends_on204,6286
environment =environment207,6393
pipelines =pipelines209,6434
name =name211,6470
edition =edition212,6550
trigger =trigger213,6581
services =services214,6612
steps =steps215,6639
environment =environment216,6717
volumes =volumes217,6756
name =name225,6970
edition =edition226,7038
trigger =trigger227,7073
services =services228,7108
steps =steps229,7145
environment =environment230,7329
volumes =volumes231,7372
deps =deps234,7433
def enterprise_pipelines(ver_mode = ver_mode, trigger = release_trigger):enterprise_pipelines247,7856
environment =environment259,8284
edition =edition260,8328
services =services261,8355
volumes =volumes262,8415
package_steps =package_steps263,8465
publish_steps =publish_steps264,8488
should_publish =should_publish265,8511
should_upload =should_upload266,8554
include_enterprise =include_enterprise267,8624
edition2 =edition2268,8673
init_steps =init_steps269,8702
build_steps =build_steps277,8909
integration_test_steps =integration_test_steps284,9218
build_storybook =build_storybook312,10299
publish_step =publish_step324,10892
store_npm_step =store_npm_step325,10976
windows_package_steps =windows_package_steps330,11175
step =step333,11284
deps_on_clone_enterprise_step =deps_on_clone_enterprise_step337,11418
windows_pipeline =windows_pipeline347,11746
name =name348,11779
edition =edition349,11836
trigger =trigger350,11863
steps =steps351,11890
platform =platform352,11965
depends_on =depends_on353,11995
environment =environment356,12109
pipelines =pipelines358,12150
name =name360,12186
edition =edition361,12273
trigger =trigger362,12304
services =services363,12335
steps =steps364,12362
environment =environment365,12440
volumes =volumes366,12479
name =name374,12711
edition =edition375,12786
trigger =trigger376,12821
services =services377,12856
steps =steps378,12893
environment =environment379,13213
volumes =volumes380,13256
deps =deps383,13317
def enterprise2_pipelines(prefix = "", ver_mode = ver_mode, trigger = release_trigger):enterprise2_pipelines397,13769
environment =environment412,14364
edition =edition415,14424
volumes =volumes416,14451
package_steps =package_steps417,14501
publish_steps =publish_steps418,14524
should_publish =should_publish419,14547
should_upload =should_upload420,14590
include_enterprise =include_enterprise421,14660
edition2 =edition2422,14709
init_steps =init_steps423,14738
build_steps =build_steps431,14945
fetch_images =fetch_images442,15355
upload_cdn =upload_cdn444,15497
step =step458,16187
deps_on_clone_enterprise_step =deps_on_clone_enterprise_step462,16321
pipelines =pipelines472,16608
name =name474,16644
edition =edition475,16742
trigger =trigger476,16773
services =services477,16804
steps =steps478,16831
volumes =volumes479,16909
environment =environment480,16940
def publish_artifacts_step(mode):publish_artifacts_step486,17019
security =security487,17053
security =security489,17098
def publish_artifacts_pipelines(mode):publish_artifacts_pipelines501,17538
trigger =trigger502,17577
steps =steps506,17655
name =name512,17768
trigger =trigger513,17820
steps =steps514,17847
edition =edition515,17870
environment =environment516,17895
def publish_packages_pipeline():publish_packages_pipeline519,17945
trigger =trigger526,18162
oss_steps =oss_steps530,18244
enterprise_steps =enterprise_steps538,18560
deps =deps545,18903
name =name552,19062
trigger =trigger553,19101
steps =steps554,19128
edition =edition555,19155
depends_on =depends_on556,19180
environment =environment557,19207
name =name559,19266
trigger =trigger560,19312
steps =steps561,19339
edition =edition562,19373
depends_on =depends_on563,19398
environment =environment564,19425
def publish_npm_pipelines(mode):publish_npm_pipelines567,19482
trigger =trigger568,19515
steps =steps572,19593
name =name580,19772
trigger =trigger581,19827
steps =steps582,19854
edition =edition583,19877
environment =environment584,19902
def artifacts_page_pipeline():artifacts_page_pipeline587,19952
trigger =trigger588,19983
name =name593,20087
trigger =trigger594,20128
steps =steps595,20155
edition =edition596,20220
environment =environment597,20245
def get_e2e_suffix():get_e2e_suffix600,20295
events/cron.star,1016
aquasec_trivy_image =aquasec_trivy_image8,209
def cronjobs(edition):cronjobs10,255
grafana_com_nightly_pipeline =grafana_com_nightly_pipeline11,278
cronName =cronName12,332
name =name13,374
steps =steps14,412
def cron_job_pipeline(cronName, name, steps):cron_job_pipeline24,773
def scan_docker_image_pipeline(edition, tag):scan_docker_image_pipeline43,1175
edition =edition55,1530
edition =edition57,1579
docker_image =docker_image59,1608
cronName =cronName62,1695
name =name63,1725
steps =steps64,1775
def scan_docker_image_unkown_low_medium_vulnerabilities_step(docker_image):scan_docker_image_unkown_low_medium_vulnerabilities_step71,2047
def scan_docker_image_high_critical_vulnerabilities_step(docker_image):scan_docker_image_high_critical_vulnerabilities_step80,2353
def slack_job_failed_step(channel, image):slack_job_failed_step89,2646
def post_to_grafana_com_step():post_to_grafana_com_step103,3069
events/main.star,633
ver_mode =ver_mode49,966
trigger =trigger50,984
def main_pipelines(edition):main_pipelines62,1168
drone_change_trigger =drone_change_trigger63,1197
pipelines =pipelines79,1513
name =name89,1951
slack_channel =slack_channel90,1994
trigger =trigger91,2045
template =template92,2089
secret =secret93,2135
name =name97,2276
slack_channel =slack_channel98,2310
trigger =trigger99,2366
depends_on =depends_on100,2425
template =template101,2563
secret =secret102,2604
events/pr.star,252
ver_mode =ver_mode48,997
trigger =trigger49,1013
def pr_pipelines(edition):pr_pipelines62,1198
def get_pr_trigger(include_paths = None, exclude_paths = None):get_pr_trigger76,2396
paths_ex =paths_ex91,3080
paths_in =paths_in92,3115
services/services.star,225
def integration_test_services_volumes():integration_test_services_volumes5,79
def integration_test_services(edition):integration_test_services14,292
services =services15,332
def ldap_service():ldap_service59,1616
utils/utils.star,561
failure_template =failure_template11,191
drone_change_template =drone_change_template12,509
services =services19,932
platform =platform20,955
depends_on =depends_on21,983
environment =environment22,1008
volumes =volumes23,1036
platform_conf =platform_conf50,2166
platform_conf =platform_conf62,2534
pipeline =pipeline70,2713
def notify_pipeline(name, slack_channel, trigger, depends_on = [], template = None, secret = None):notify_pipeline105,3545
trigger =trigger106,3645
pipelines/trigger_downstream.star,440
trigger =trigger14,249
def enterprise_downstream_pipeline(edition, ver_mode):enterprise_downstream_pipeline26,433
environment =environment27,488
steps =steps28,527
deps =deps29,587
name =name31,672
edition =edition32,714
trigger =trigger33,741
services =services34,768
steps =steps35,791
depends_on =depends_on36,814
environment =environment37,841
pipelines/verify_starlark.star,323
def verify_starlark(trigger, ver_mode):verify_starlark17,305
environment =environment18,345
steps =steps19,382
name =name26,546
edition =edition27,600
trigger =trigger28,625
services =services29,652
steps =steps30,675
environment =environment31,698
pipelines/build.star,508
def build_e2e(trigger, ver_mode, edition):build_e2e39,936
environment =environment50,1096
variants =variants51,1135
init_steps =init_steps52,1219
build_steps =build_steps61,1491
publish_suffix =publish_suffix107,4049
publish_suffix =publish_suffix109,4100
name =name112,4158
edition =edition113,4224
environment =environment114,4249
services =services115,4284
steps =steps116,4307
trigger =trigger117,4349
pipelines/shellcheck.star,386
trigger =trigger15,235
def shellcheck_step():shellcheck_step31,483
def shellcheck_pipeline():shellcheck_pipeline43,725
environment =environment44,752
steps =steps45,789
name =name50,886
edition =edition51,918
trigger =trigger52,943
services =services53,970
steps =steps54,993
environment =environment55,1016
pipelines/verify_drone.star,317
def verify_drone(trigger, ver_mode):verify_drone17,293
environment =environment18,330
steps =steps19,367
name =name26,528
edition =edition27,579
trigger =trigger28,604
services =services29,631
steps =steps30,654
environment =environment31,677
pipelines/test_backend.star,474
def test_backend(trigger, ver_mode, edition = "oss"):test_backend23,463
environment =environment35,882
init_steps =init_steps36,921
test_steps =test_steps46,1291
pipeline_name =pipeline_name51,1387
pipeline_name =pipeline_name53,1492
name =name55,1584
edition =edition56,1614
trigger =trigger57,1641
services =services58,1668
steps =steps59,1691
environment =environment60,1732
pipelines/lint_frontend.star,415
def lint_frontend_pipeline(trigger, ver_mode):lint_frontend_pipeline16,260
environment =environment26,546
yarn_step =yarn_step27,583
init_steps =init_steps29,660
test_steps =test_steps33,736
name =name37,812
edition =edition38,864
trigger =trigger39,889
services =services40,916
steps =steps41,939
environment =environment42,980
pipelines/docs.star,494
docs_paths =docs_paths19,383
def docs_pipelines(edition, ver_mode, trigger):docs_pipelines28,511
environment =environment29,559
steps =steps30,598
name =name40,815
edition =edition41,858
trigger =trigger42,885
services =services43,912
steps =steps44,935
environment =environment45,958
def lint_docs():lint_docs48,1000
def trigger_docs_main():trigger_docs_main63,1328
def trigger_docs_pr():trigger_docs_pr72,1478
pipelines/test_frontend.star,476
def test_frontend(trigger, ver_mode, edition = "oss"):test_frontend20,374
environment =environment32,794
init_steps =init_steps33,833
test_steps =test_steps41,1102
pipeline_name =pipeline_name45,1205
pipeline_name =pipeline_name47,1311
name =name49,1404
edition =edition50,1434
trigger =trigger51,1461
services =services52,1488
steps =steps53,1511
environment =environment54,1552
pipelines/integration_tests.star,483
def integration_tests(trigger, ver_mode, edition):integration_tests26,542
environment =environment37,900
services =services38,939
volumes =volumes39,989
init_steps =init_steps40,1039
test_steps =test_steps48,1282
name =name54,1412
edition =edition55,1468
trigger =trigger56,1493
services =services57,1520
steps =steps58,1549
environment =environment59,1590
volumes =volumes60,1625
pipelines/windows.star,954
def windows(trigger, edition, ver_mode):windows17,339
environment =environment29,798
init_cmds =init_cmds30,837
steps =steps38,1205
bucket =bucket49,1497
ver_part =ver_part51,1590
dir =dir52,1628
dir =dir54,1670
bucket =bucket55,1695
build_no =build_no56,1736
ver_part =ver_part57,1780
installer_commands =installer_commands58,1842
committish =committish100,3763
committish =committish102,3846
committish =committish104,3906
download_grabpl_step_cmds =download_grabpl_step_cmds107,4057
clone_cmds =clone_cmds113,4363
name =name146,5711
edition =edition147,5742
trigger =trigger148,5769
steps =steps149,5830
depends_on =depends_on150,5889
platform =platform151,6007
environment =environment152,6037
pipelines/lint_backend.star,418
def lint_backend_pipeline(trigger, ver_mode):lint_backend_pipeline18,306
environment =environment28,590
wire_step =wire_step29,627
init_steps =init_steps31,704
test_steps =test_steps36,809
name =name43,959
edition =edition44,1010
trigger =trigger45,1035
services =services46,1062
steps =steps47,1085
environment =environment48,1126
pipelines/publish_images.star,998
def publish_image_steps(edition, mode, docker_repo):publish_image_steps17,303
additional_docker_repo =additional_docker_repo31,922
additional_docker_repo =additional_docker_repo33,979
steps =steps34,1034
def publish_image_pipelines_public():publish_image_pipelines_public45,1369
mode =mode51,1521
trigger =trigger52,1541
name =name57,1641
trigger =trigger58,1694
steps =steps59,1721
edition =edition60,1813
environment =environment61,1835
name =name63,1894
trigger =trigger64,1954
steps =steps65,1981
edition =edition66,2091
environment =environment67,2113
def publish_image_pipelines_security():publish_image_pipelines_security70,2170
mode =mode71,2210
trigger =trigger72,2232
name =name77,2332
trigger =trigger78,2392
steps =steps79,2419
edition =edition80,2529
environment =environment81,2551
steps/lib.star,8579
grabpl_version =grabpl_version7,181
build_image =build_image8,208
publish_image =publish_image9,254
deploy_docker_image =deploy_docker_image10,304
alpine_image =alpine_image11,380
curl_image =curl_image12,411
windows_image =windows_image13,452
wix_image =wix_image14,501
go_image =go_image15,536
disable_tests =disable_tests17,564
trigger_oss =trigger_oss18,586
def slack_step(channel, template, secret):slack_step24,653
def yarn_install_step(edition = "oss"):yarn_install_step35,918
deps =deps36,958
deps =deps38,1004
def wire_install_step():wire_install_step48,1222
def identify_runner_step(platform = "linux"):identify_runner_step60,1454
def clone_enterprise_step(ver_mode):clone_enterprise_step78,1916
committish =committish87,2193
committish =committish89,2268
committish =committish91,2317
def init_enterprise_step(ver_mode):init_enterprise_step105,2747
source_commit =source_commit115,3098
source_commit =source_commit117,3151
environment =environment118,3191
token =token121,3280
environment =environment123,3369
token =token126,3458
environment =environment128,3518
token =token129,3543
def download_grabpl_step(platform = "linux"):download_grabpl_step148,4147
def lint_drone_step():lint_drone_step173,4973
def lint_starlark_step():lint_starlark_step185,5216
def enterprise_downstream_step(edition, ver_mode):enterprise_downstream_step206,6000
repo =repo219,6482
step =step225,6623
def lint_backend_step():lint_backend_step247,7248
def benchmark_ldap_step():benchmark_ldap_step265,7713
def build_storybook_step(edition, ver_mode):build_storybook_step278,8087
def store_storybook_step(edition, ver_mode, trigger = None):store_storybook_step300,8743
commands =commands314,9202
commands =commands323,9521
step =step325,9593
when_cond =when_cond338,10125
step =step346,10330
def e2e_tests_artifacts(edition):e2e_tests_artifacts349,10391
def upload_cdn_step(edition, ver_mode, trigger = None):upload_cdn_step386,12378
deps =deps397,12763
step =step407,12970
step =step420,13423
def build_backend_step(edition, ver_mode, variants = None):build_backend_step423,13482
variants_str =variants_str437,14070
variants_str =variants_str439,14109
cmds =cmds443,14256
build_no =build_no449,14418
cmds =cmds450,14461
def build_frontend_step(edition, ver_mode):build_frontend_step468,14906
build_no =build_no478,15246
cmds =cmds482,15356
cmds =cmds487,15505
def build_frontend_package_step(edition, ver_mode):build_frontend_package_step505,15960
build_no =build_no515,16312
cmds =cmds519,16422
cmds =cmds524,16580
def build_plugins_step(edition, ver_mode):build_plugins_step542,17053
env =env544,17121
env =env548,17220
def test_backend_step():test_backend_step563,17607
def test_backend_integration_step():test_backend_integration_step575,17880
def betterer_frontend_step(edition = "oss"):betterer_frontend_step587,18187
deps =deps596,18427
def test_frontend_step(edition = "oss"):test_frontend_step609,18728
deps =deps618,18962
def lint_frontend_step():lint_frontend_step634,19343
def test_a11y_frontend_step(ver_mode, edition, port = 3001):test_a11y_frontend_step652,19793
commands =commands664,20279
failure =failure667,20345
failure =failure672,20483
def frontend_metrics_step(edition, trigger = None):frontend_metrics_step693,21146
step =step706,21507
step =step721,22007
def codespell_step():codespell_step724,22066
def package_step(edition, ver_mode, variants = None):package_step736,22468
deps =deps750,23006
variants_str =variants_str757,23167
variants_str =variants_str759,23206
sign_args =sign_args762,23332
env =env763,23362
test_args =test_args769,23628
sign_args =sign_args771,23661
env =env772,23684
test_args =test_args773,23703
cmds =cmds777,23829
build_no =build_no784,24036
cmds =cmds785,24079
def grafana_server_step(edition, port = 3001):grafana_server_step798,24459
package_file_pfx =package_file_pfx808,24729
package_file_pfx =package_file_pfx810,24788
package_file_pfx =package_file_pfx812,24889
environment =environment814,24938
def e2e_tests_step(suite, edition, port = 3001, tries = None):e2e_tests_step837,25554
cmd =cmd838,25617
def cloud_plugins_e2e_tests_step(suite, edition, cloud, trigger = None):cloud_plugins_e2e_tests_step856,26186
environment =environment869,26649
when =when870,26670
when =when872,26700
environment =environment874,26748
when =when882,27129
branch =branch888,27345
step =step889,27401
step =step901,27822
def build_docs_website_step():build_docs_website_step904,27874
def copy_packages_for_docker_step(edition = None):copy_packages_for_docker_step916,28272
def build_docker_images_step(edition, archs = None, ubuntu = False, publish = False):build_docker_images_step929,28622
cmd =cmd943,29193
ubuntu_sfx =ubuntu_sfx947,29307
ubuntu_sfx =ubuntu_sfx949,29342
environment =environment955,29468
def fetch_images_step(edition):fetch_images_step979,30079
def publish_images_step(edition, ver_mode, mode, docker_repo, trigger = None):publish_images_step997,30745
name =name1013,31562
docker_repo =docker_repo1014,31585
mode =mode1016,31663
mode =mode1018,31709
environment =environment1020,31728
cmd =cmd1026,31912
deps =deps1029,32041
deps =deps1032,32147
name =name1035,32250
docker_repo =docker_repo1036,32273
cmd =cmd1038,32459
step =step1040,32565
step =step1052,32929
def postgres_integration_tests_step():postgres_integration_tests_step1056,32989
cmds =cmds1057,33028
def mysql_integration_tests_step():mysql_integration_tests_step1079,33850
cmds =cmds1080,33886
def redis_integration_tests_step():redis_integration_tests_step1100,34629
def memcached_integration_tests_step():memcached_integration_tests_step1114,35026
def release_canary_npm_packages_step(edition, trigger = None):release_canary_npm_packages_step1128,35435
step =step1141,35805
step =step1153,36143
def enterprise2_suffix(edition):enterprise2_suffix1156,36202
def upload_packages_step(edition, ver_mode, trigger = None):upload_packages_step1161,36320
deps =deps1176,36816
step =step1184,37036
step =step1195,37471
def publish_grafanacom_step(edition, ver_mode):publish_grafanacom_step1198,37530
cmd =cmd1211,38044
build_no =build_no1215,38188
cmd =cmd1216,38231
def publish_linux_packages_step(edition, package_manager = "deb"):publish_linux_packages_step1239,38866
def get_windows_steps(edition, ver_mode):get_windows_steps1261,39989
init_cmds =init_cmds1270,40281
steps =steps1278,40649
bucket =bucket1289,40941
ver_part =ver_part1291,41034
dir =dir1292,41072
dir =dir1294,41114
bucket =bucket1295,41139
build_no =build_no1296,41180
ver_part =ver_part1297,41224
installer_commands =installer_commands1298,41286
committish =committish1340,43207
committish =committish1342,43290
committish =committish1344,43350
download_grabpl_step_cmds =download_grabpl_step_cmds1347,43501
clone_cmds =clone_cmds1353,43807
def verify_gen_cue_step(edition):verify_gen_cue_step1387,45152
deps =deps1388,45186
def verify_gen_jsonnet_step(edition):verify_gen_jsonnet_step1402,45694
deps =deps1403,45732
def trigger_test_release():trigger_test_release1417,46236
def artifacts_page_step():artifacts_page_step1451,47731
def end_to_end_tests_deps():end_to_end_tests_deps1466,48058
def compile_build_cmd(edition = "oss"):compile_build_cmd1476,48321
dependencies =dependencies1477,48361
dependencies =dependencies1479,48432
def get_trigger_storybook(ver_mode):get_trigger_storybook1492,48780
trigger_storybook =trigger_storybook1500,49031
trigger_storybook =trigger_storybook1502,49088
trigger_storybook =trigger_storybook1506,49168
vault.star,444
pull_secret =pull_secret4,87
github_token =github_token5,120
drone_token =drone_token6,150
prerelease_bucket =prerelease_bucket7,178
gcp_upload_artifacts_key =gcp_upload_artifacts_key8,218
azure_sp_app_id =azure_sp_app_id9,272
azure_sp_app_pw =azure_sp_app_pw10,308
azure_tenant =azure_tenant11,344
def from_secret(secret):from_secret13,375
def vault_secret(name, path, key):vault_secret18,451
def secrets():secrets28,633
version.star,116
ver_mode =ver_mode12,197
trigger =trigger13,225
def version_branch_pipelines():version_branch_pipelines15,268

@ -2,15 +2,11 @@
This module provides functions for cronjob pipelines and steps used within. This module provides functions for cronjob pipelines and steps used within.
""" """
load("scripts/drone/vault.star", "from_secret")
load(
"scripts/drone/steps/lib.star",
"compile_build_cmd",
)
load( load(
"scripts/drone/utils/images.star", "scripts/drone/utils/images.star",
"images", "images",
) )
load("scripts/drone/vault.star", "from_secret")
aquasec_trivy_image = "aquasec/trivy:0.21.0" aquasec_trivy_image = "aquasec/trivy:0.21.0"
@ -21,7 +17,6 @@ def cronjobs():
scan_docker_image_pipeline("latest-ubuntu"), scan_docker_image_pipeline("latest-ubuntu"),
scan_docker_image_pipeline("main-ubuntu"), scan_docker_image_pipeline("main-ubuntu"),
scan_build_test_publish_docker_image_pipeline(), scan_build_test_publish_docker_image_pipeline(),
grafana_com_nightly_pipeline(),
] ]
def authenticate_gcr_step(): def authenticate_gcr_step():
@ -59,6 +54,10 @@ def cron_job_pipeline(cronName, name, steps):
"path": "/var/run/docker.sock", "path": "/var/run/docker.sock",
}, },
}, },
{
"name": "config",
"temp": {},
},
], ],
} }
@ -117,13 +116,13 @@ def scan_docker_image_unknown_low_medium_vulnerabilities_step(docker_image):
for key in images: for key in images:
cmds = cmds + ["trivy --exit-code 0 --severity UNKNOWN,LOW,MEDIUM " + images[key]] cmds = cmds + ["trivy --exit-code 0 --severity UNKNOWN,LOW,MEDIUM " + images[key]]
else: else:
cmds = ["trivy --exit-code 0 --severity UNKNOWN,LOW,MEDIUM " + docker_image] cmds = ["trivy image --exit-code 0 --severity UNKNOWN,LOW,MEDIUM " + docker_image]
return { return {
"name": "scan-unknown-low-medium-vulnerabilities", "name": "scan-unknown-low-medium-vulnerabilities",
"image": aquasec_trivy_image, "image": aquasec_trivy_image,
"commands": cmds, "commands": cmds,
"depends_on": ["authenticate-gcr"], "depends_on": ["authenticate-gcr"],
"volumes": [{"name": "docker", "path": "/var/run/docker.sock"}], "volumes": [{"name": "docker", "path": "/var/run/docker.sock"}, {"name": "config", "path": "/root/.docker/"}],
} }
def scan_docker_image_high_critical_vulnerabilities_step(docker_image): def scan_docker_image_high_critical_vulnerabilities_step(docker_image):
@ -141,19 +140,22 @@ def scan_docker_image_high_critical_vulnerabilities_step(docker_image):
for key in images: for key in images:
cmds = cmds + ["trivy --exit-code 1 --severity HIGH,CRITICAL " + images[key]] cmds = cmds + ["trivy --exit-code 1 --severity HIGH,CRITICAL " + images[key]]
else: else:
cmds = ["trivy --exit-code 1 --severity HIGH,CRITICAL " + docker_image] cmds = ["trivy image --exit-code 1 --severity HIGH,CRITICAL " + docker_image]
return { return {
"name": "scan-high-critical-vulnerabilities", "name": "scan-high-critical-vulnerabilities",
"image": aquasec_trivy_image, "image": aquasec_trivy_image,
"commands": cmds, "commands": cmds,
"depends_on": ["authenticate-gcr"], "depends_on": ["authenticate-gcr"],
"volumes": [{"name": "docker", "path": "/var/run/docker.sock"}], "environment": {
"GOOGLE_APPLICATION_CREDENTIALS": from_secret("gcr_credentials_json"),
},
"volumes": [{"name": "docker", "path": "/var/run/docker.sock"}, {"name": "config", "path": "/root/.docker/"}],
} }
def slack_job_failed_step(channel, image): def slack_job_failed_step(channel, image):
return { return {
"name": "slack-notify-failure", "name": "slack-notify-failure",
"image": images["plugins_slack_image"], "image": images["plugins_slack"],
"settings": { "settings": {
"webhook": from_secret("slack_webhook_backend"), "webhook": from_secret("slack_webhook_backend"),
"channel": channel, "channel": channel,
@ -167,7 +169,7 @@ def slack_job_failed_step(channel, image):
def post_to_grafana_com_step(): def post_to_grafana_com_step():
return { return {
"name": "post-to-grafana-com", "name": "post-to-grafana-com",
"image": images["publish_image"], "image": images["publish"],
"environment": { "environment": {
"GRAFANA_COM_API_KEY": from_secret("grafana_api_key"), "GRAFANA_COM_API_KEY": from_secret("grafana_api_key"),
"GCP_KEY": from_secret("gcp_key"), "GCP_KEY": from_secret("gcp_key"),
@ -175,13 +177,3 @@ def post_to_grafana_com_step():
"depends_on": ["compile-build-cmd"], "depends_on": ["compile-build-cmd"],
"commands": ["./bin/build publish grafana-com --edition oss"], "commands": ["./bin/build publish grafana-com --edition oss"],
} }
def grafana_com_nightly_pipeline():
return cron_job_pipeline(
cronName = "grafana-com-nightly",
name = "grafana-com-nightly",
steps = [
compile_build_cmd(),
post_to_grafana_com_step(),
],
)

@ -3,10 +3,8 @@ This module returns all the pipelines used in the event of pushes to the main br
""" """
load( load(
"scripts/drone/utils/utils.star", "scripts/drone/pipelines/build.star",
"drone_change_template", "build_e2e",
"failure_template",
"notify_pipeline",
) )
load( load(
"scripts/drone/pipelines/docs.star", "scripts/drone/pipelines/docs.star",
@ -14,36 +12,37 @@ load(
"trigger_docs_main", "trigger_docs_main",
) )
load( load(
"scripts/drone/pipelines/test_frontend.star", "scripts/drone/pipelines/integration_tests.star",
"test_frontend", "integration_tests",
) )
load( load(
"scripts/drone/pipelines/test_backend.star", "scripts/drone/pipelines/lint_backend.star",
"test_backend", "lint_backend_pipeline",
) )
load( load(
"scripts/drone/pipelines/integration_tests.star", "scripts/drone/pipelines/lint_frontend.star",
"integration_tests", "lint_frontend_pipeline",
) )
load( load(
"scripts/drone/pipelines/build.star", "scripts/drone/pipelines/test_backend.star",
"build_e2e", "test_backend",
) )
load( load(
"scripts/drone/pipelines/windows.star", "scripts/drone/pipelines/test_frontend.star",
"windows", "test_frontend",
) )
load( load(
"scripts/drone/pipelines/trigger_downstream.star", "scripts/drone/pipelines/trigger_downstream.star",
"enterprise_downstream_pipeline", "enterprise_downstream_pipeline",
) )
load( load(
"scripts/drone/pipelines/lint_backend.star", "scripts/drone/pipelines/windows.star",
"lint_backend_pipeline", "windows",
) )
load( load(
"scripts/drone/pipelines/lint_frontend.star", "scripts/drone/utils/utils.star",
"lint_frontend_pipeline", "failure_template",
"notify_pipeline",
) )
ver_mode = "main" ver_mode = "main"
@ -59,27 +58,12 @@ trigger = {
"latest.json", "latest.json",
], ],
}, },
}
def main_pipelines():
drone_change_trigger = {
"event": [
"push",
],
"branch": "main",
"repo": [ "repo": [
"grafana/grafana", "grafana/grafana",
], ],
"paths": { }
"include": [
".drone.yml",
],
"exclude": [
"exclude",
],
},
}
def main_pipelines():
pipelines = [ pipelines = [
docs_pipelines(ver_mode, trigger_docs_main()), docs_pipelines(ver_mode, trigger_docs_main()),
test_frontend(trigger, ver_mode), test_frontend(trigger, ver_mode),
@ -89,13 +73,6 @@ def main_pipelines():
build_e2e(trigger, ver_mode), build_e2e(trigger, ver_mode),
integration_tests(trigger, prefix = ver_mode, ver_mode = ver_mode), integration_tests(trigger, prefix = ver_mode, ver_mode = ver_mode),
windows(trigger, ver_mode = ver_mode), windows(trigger, ver_mode = ver_mode),
notify_pipeline(
name = "notify-drone-changes",
slack_channel = "slack-webhooks-test",
trigger = drone_change_trigger,
template = drone_change_template,
secret = "drone-changes-webhook",
),
enterprise_downstream_pipeline(), enterprise_downstream_pipeline(),
notify_pipeline( notify_pipeline(
name = "main-notify", name = "main-notify",

@ -4,45 +4,49 @@ It also includes a function generating a PR trigger from a list of included and
""" """
load( load(
"scripts/drone/pipelines/test_frontend.star", "scripts/drone/pipelines/benchmarks.star",
"test_frontend", "integration_benchmarks",
) )
load( load(
"scripts/drone/pipelines/test_backend.star", "scripts/drone/pipelines/build.star",
"test_backend", "build_e2e",
)
load(
"scripts/drone/pipelines/docs.star",
"docs_pipelines",
"trigger_docs_pr",
) )
load( load(
"scripts/drone/pipelines/integration_tests.star", "scripts/drone/pipelines/integration_tests.star",
"integration_tests", "integration_tests",
) )
load( load(
"scripts/drone/pipelines/build.star", "scripts/drone/pipelines/lint_backend.star",
"build_e2e", "lint_backend_pipeline",
) )
load( load(
"scripts/drone/pipelines/verify_drone.star", "scripts/drone/pipelines/lint_frontend.star",
"verify_drone", "lint_frontend_pipeline",
) )
load( load(
"scripts/drone/pipelines/verify_starlark.star", "scripts/drone/pipelines/shellcheck.star",
"verify_starlark", "shellcheck_pipeline",
) )
load( load(
"scripts/drone/pipelines/docs.star", "scripts/drone/pipelines/test_backend.star",
"docs_pipelines", "test_backend",
"trigger_docs_pr",
) )
load( load(
"scripts/drone/pipelines/shellcheck.star", "scripts/drone/pipelines/test_frontend.star",
"shellcheck_pipeline", "test_frontend",
) )
load( load(
"scripts/drone/pipelines/lint_backend.star", "scripts/drone/pipelines/verify_drone.star",
"lint_backend_pipeline", "verify_drone",
) )
load( load(
"scripts/drone/pipelines/lint_frontend.star", "scripts/drone/pipelines/verify_starlark.star",
"lint_frontend_pipeline", "verify_starlark",
) )
ver_mode = "pr" ver_mode = "pr"
@ -130,10 +134,12 @@ def pr_pipelines():
], ],
), ),
prefix = ver_mode, prefix = ver_mode,
ver_mode = ver_mode,
), ),
docs_pipelines(ver_mode, trigger_docs_pr()), docs_pipelines(ver_mode, trigger_docs_pr()),
shellcheck_pipeline(), shellcheck_pipeline(),
integration_benchmarks(
prefix = ver_mode,
),
] ]
def get_pr_trigger(include_paths = None, exclude_paths = None): def get_pr_trigger(include_paths = None, exclude_paths = None):

@ -2,53 +2,36 @@
This module returns all the pipelines used in the event of a release along with supporting functions. This module returns all the pipelines used in the event of a release along with supporting functions.
""" """
load(
"scripts/drone/services/services.star",
"integration_test_services",
"integration_test_services_volumes",
)
load( load(
"scripts/drone/steps/lib.star", "scripts/drone/steps/lib.star",
"build_backend_step",
"build_docker_images_step",
"build_frontend_package_step",
"build_frontend_step",
"build_plugins_step",
"build_storybook_step",
"compile_build_cmd", "compile_build_cmd",
"copy_packages_for_docker_step",
"download_grabpl_step", "download_grabpl_step",
"e2e_tests_artifacts",
"e2e_tests_step",
"get_windows_steps",
"grafana_server_step",
"identify_runner_step", "identify_runner_step",
"mysql_integration_tests_step", "memcached_integration_tests_steps",
"package_step", "mysql_integration_tests_steps",
"postgres_integration_tests_step", "postgres_integration_tests_steps",
"publish_grafanacom_step", "publish_grafanacom_step",
"publish_linux_packages_step", "publish_linux_packages_step",
"store_storybook_step", "redis_integration_tests_steps",
"trigger_oss", "remote_alertmanager_integration_tests_steps",
"upload_cdn_step",
"upload_packages_step",
"verify_gen_cue_step", "verify_gen_cue_step",
"verify_gen_jsonnet_step", "verify_gen_jsonnet_step",
"wire_install_step", "wire_install_step",
"yarn_install_step", "yarn_install_step",
) )
load( load(
"scripts/drone/services/services.star", "scripts/drone/utils/images.star",
"integration_test_services", "images",
"integration_test_services_volumes",
) )
load( load(
"scripts/drone/utils/utils.star", "scripts/drone/utils/utils.star",
"pipeline", "pipeline",
) )
load(
"scripts/drone/pipelines/test_frontend.star",
"test_frontend",
)
load(
"scripts/drone/pipelines/test_backend.star",
"test_backend",
)
load( load(
"scripts/drone/vault.star", "scripts/drone/vault.star",
"from_secret", "from_secret",
@ -57,51 +40,13 @@ load(
"prerelease_bucket", "prerelease_bucket",
"rgm_gcp_key_base64", "rgm_gcp_key_base64",
) )
load(
"scripts/drone/utils/images.star",
"images",
)
load(
"scripts/drone/pipelines/whats_new_checker.star",
"whats_new_checker_pipeline",
)
ver_mode = "release" ver_mode = "release"
release_trigger = {
"event": {
"exclude": [
"promote",
],
},
"ref": {
"include": [
"refs/tags/v*",
],
"exclude": [
"refs/tags/*-cloud*",
],
},
}
def store_npm_packages_step():
return {
"name": "store-npm-packages",
"image": images["build_image"],
"depends_on": [
"compile-build-cmd",
"build-frontend-packages",
],
"environment": {
"GCP_KEY": from_secret(gcp_grafanauploads_base64),
"PRERELEASE_BUCKET": from_secret(prerelease_bucket),
},
"commands": ["./bin/build artifacts npm store --tag ${DRONE_TAG}"],
}
def retrieve_npm_packages_step(): def retrieve_npm_packages_step():
return { return {
"name": "retrieve-npm-packages", "name": "retrieve-npm-packages",
"image": images["publish_image"], "image": images["publish"],
"depends_on": [ "depends_on": [
"compile-build-cmd", "compile-build-cmd",
"yarn-install", "yarn-install",
@ -117,7 +62,7 @@ def retrieve_npm_packages_step():
def release_npm_packages_step(): def release_npm_packages_step():
return { return {
"name": "release-npm-packages", "name": "release-npm-packages",
"image": images["build_image"], "image": images["go"],
"depends_on": [ "depends_on": [
"compile-build-cmd", "compile-build-cmd",
"retrieve-npm-packages", "retrieve-npm-packages",
@ -129,142 +74,10 @@ def release_npm_packages_step():
"commands": ["./bin/build artifacts npm release --tag ${DRONE_TAG}"], "commands": ["./bin/build artifacts npm release --tag ${DRONE_TAG}"],
} }
def oss_pipelines(ver_mode = ver_mode, trigger = release_trigger):
"""Generates all pipelines used for Grafana OSS.
Args:
ver_mode: controls which steps are included in the pipeline.
Defaults to 'release'.
trigger: controls which events can trigger the pipeline execution.
Defaults to tag events for tags with a 'v' prefix.
Returns:
List of Drone pipelines.
"""
environment = {"EDITION": "oss"}
services = integration_test_services(edition = "oss")
volumes = integration_test_services_volumes()
init_steps = [
identify_runner_step(),
download_grabpl_step(),
verify_gen_cue_step(),
wire_install_step(),
yarn_install_step(),
compile_build_cmd(),
]
build_steps = [
build_backend_step(ver_mode = ver_mode),
build_frontend_step(ver_mode = ver_mode),
build_frontend_package_step(ver_mode = ver_mode),
build_plugins_step(ver_mode = ver_mode),
package_step(ver_mode = ver_mode),
copy_packages_for_docker_step(),
build_docker_images_step(publish = True),
build_docker_images_step(
publish = True,
ubuntu = True,
),
grafana_server_step(),
e2e_tests_step("dashboards-suite", tries = 3),
e2e_tests_step("smoke-tests-suite", tries = 3),
e2e_tests_step("panels-suite", tries = 3),
e2e_tests_step("various-suite", tries = 3),
e2e_tests_artifacts(),
build_storybook_step(ver_mode = ver_mode),
]
publish_steps = []
if ver_mode in (
"release",
"release-branch",
):
publish_steps.extend(
[
upload_cdn_step(ver_mode = ver_mode, trigger = trigger_oss),
upload_packages_step(
ver_mode = ver_mode,
trigger = trigger_oss,
),
],
)
if ver_mode in ("release",):
publish_steps.extend(
[
store_storybook_step(ver_mode = ver_mode),
store_npm_packages_step(),
],
)
integration_test_steps = [
postgres_integration_tests_step(),
mysql_integration_tests_step(),
]
pipelines = []
# We don't need to run integration tests at release time since they have
# been run multiple times before:
if ver_mode in ("release"):
pipelines.append(whats_new_checker_pipeline(release_trigger))
integration_test_steps = []
volumes = []
windows_pipeline_dependencies = [
"{}-build-e2e-publish".format(ver_mode),
"{}-test-frontend".format(ver_mode),
]
pipelines.extend([
pipeline(
name = "{}-build-e2e-publish".format(ver_mode),
trigger = trigger,
services = [],
steps = init_steps + build_steps + publish_steps,
environment = environment,
volumes = volumes,
),
test_frontend(trigger, ver_mode),
test_backend(trigger, ver_mode),
])
if ver_mode not in ("release"):
pipelines.append(pipeline(
name = "{}-integration-tests".format(ver_mode),
trigger = trigger,
services = services,
steps = [
download_grabpl_step(),
identify_runner_step(),
verify_gen_cue_step(),
verify_gen_jsonnet_step(),
wire_install_step(),
] +
integration_test_steps,
environment = environment,
volumes = volumes,
))
windows_pipeline = pipeline(
name = "{}-windows".format(ver_mode),
trigger = trigger,
steps = get_windows_steps(ver_mode = ver_mode),
platform = "windows",
depends_on = windows_pipeline_dependencies,
environment = environment,
)
pipelines.append(windows_pipeline)
return pipelines
def publish_artifacts_step(): def publish_artifacts_step():
return { return {
"name": "publish-artifacts", "name": "publish-artifacts",
"image": images["publish_image"], "image": images["publish"],
"environment": { "environment": {
"GCP_KEY": from_secret(gcp_grafanauploads_base64), "GCP_KEY": from_secret(gcp_grafanauploads_base64),
"PRERELEASE_BUCKET": from_secret("prerelease_bucket"), "PRERELEASE_BUCKET": from_secret("prerelease_bucket"),
@ -278,7 +91,7 @@ def publish_artifacts_step():
def publish_static_assets_step(): def publish_static_assets_step():
return { return {
"name": "publish-static-assets", "name": "publish-static-assets",
"image": images["publish_image"], "image": images["publish"],
"environment": { "environment": {
"GCP_KEY": from_secret(gcp_grafanauploads_base64), "GCP_KEY": from_secret(gcp_grafanauploads_base64),
"PRERELEASE_BUCKET": from_secret("prerelease_bucket"), "PRERELEASE_BUCKET": from_secret("prerelease_bucket"),
@ -293,7 +106,7 @@ def publish_static_assets_step():
def publish_storybook_step(): def publish_storybook_step():
return { return {
"name": "publish-storybook", "name": "publish-storybook",
"image": images["publish_image"], "image": images["publish"],
"environment": { "environment": {
"GCP_KEY": from_secret(gcp_grafanauploads_base64), "GCP_KEY": from_secret(gcp_grafanauploads_base64),
"PRERELEASE_BUCKET": from_secret("prerelease_bucket"), "PRERELEASE_BUCKET": from_secret("prerelease_bucket"),
@ -405,15 +218,17 @@ def integration_test_pipelines():
} }
pipelines = [] pipelines = []
volumes = integration_test_services_volumes() volumes = integration_test_services_volumes()
oss_integration_test_steps = [ integration_test_steps = postgres_integration_tests_steps() + \
postgres_integration_tests_step(), mysql_integration_tests_steps("mysql57", "5.7") + \
mysql_integration_tests_step(), mysql_integration_tests_steps("mysql80", "8.0") + \
] redis_integration_tests_steps() + \
memcached_integration_tests_steps() + \
remote_alertmanager_integration_tests_steps()
pipelines.append(pipeline( pipelines.append(pipeline(
name = "integration-tests", name = "integration-tests",
trigger = trigger, trigger = trigger,
services = integration_test_services(edition = "oss"), services = integration_test_services(),
steps = [ steps = [
download_grabpl_step(), download_grabpl_step(),
identify_runner_step(), identify_runner_step(),
@ -421,7 +236,7 @@ def integration_test_pipelines():
verify_gen_jsonnet_step(), verify_gen_jsonnet_step(),
wire_install_step(), wire_install_step(),
] + ] +
oss_integration_test_steps, integration_test_steps,
environment = {"EDITION": "oss"}, environment = {"EDITION": "oss"},
volumes = volumes, volumes = volumes,
)) ))
@ -433,7 +248,7 @@ def verify_release_pipeline(
bucket = from_secret(prerelease_bucket), bucket = from_secret(prerelease_bucket),
gcp_key = from_secret(rgm_gcp_key_base64), gcp_key = from_secret(rgm_gcp_key_base64),
version = "${DRONE_TAG}", version = "${DRONE_TAG}",
trigger = release_trigger, trigger = {},
depends_on = [ depends_on = [
"release-build-e2e-publish", "release-build-e2e-publish",
"release-windows", "release-windows",
@ -447,7 +262,7 @@ def verify_release_pipeline(
step = { step = {
"name": "gsutil-stat", "name": "gsutil-stat",
"depends_on": ["clone"], "depends_on": ["clone"],
"image": images["cloudsdk_image"], "image": images["cloudsdk"],
"environment": { "environment": {
"BUCKET": bucket, "BUCKET": bucket,
"GCP_KEY": gcp_key, "GCP_KEY": gcp_key,

@ -0,0 +1,80 @@
"""
This module returns the pipeline used for integration benchmarks.
"""
load(
"scripts/drone/services/services.star",
"integration_test_services",
"integration_test_services_volumes",
)
load(
"scripts/drone/steps/lib.star",
"compile_build_cmd",
"enterprise_setup_step",
"integration_benchmarks_step",
"verify_gen_cue_step",
"verify_gen_jsonnet_step",
"wire_install_step",
)
load(
"scripts/drone/utils/utils.star",
"pipeline",
)
def integration_benchmarks(prefix):
    """Build the Drone pipeline that runs the Go integration benchmarks.

    The pipeline is only triggered by a `promote` event with the
    `gobenchmarks` target, and runs one benchmark step per database backend.

    Args:
      prefix: used in the naming of the pipeline.

    Returns:
      Drone pipeline.
    """
    environment = {"EDITION": "oss"}
    services = integration_test_services()
    volumes = integration_test_services_volumes()

    # In pull requests, attempt to clone grafana enterprise first.
    setup_steps = [enterprise_setup_step(isPromote = True)]

    cue_step = verify_gen_cue_step()
    jsonnet_step = verify_gen_jsonnet_step()

    # The verify_gen_cue step must run only after enterprise is cloned:
    # the wire step (which everything else depends on) depends on it.
    cue_step["depends_on"].append("clone-enterprise")
    jsonnet_step["depends_on"].append("clone-enterprise")

    setup_steps += [
        compile_build_cmd(),
        cue_step,
        jsonnet_step,
        wire_install_step(),
    ]

    # One benchmark run per database backend, in a fixed order:
    # sqlite first, then each server-backed database with its env.
    benchmark_steps = integration_benchmarks_step("sqlite")
    for db, db_env in [
        ("postgres", {
            "PGPASSWORD": "grafanatest",
            "GRAFANA_TEST_DB": "postgres",
            "POSTGRES_HOST": "postgres",
        }),
        ("mysql-5.7", {
            "GRAFANA_TEST_DB": "mysql",
            "MYSQL_HOST": "mysql57",
        }),
        ("mysql-8.0", {
            "GRAFANA_TEST_DB": "mysql",
            "MYSQL_HOST": "mysql80",
        }),
    ]:
        benchmark_steps += integration_benchmarks_step(db, db_env)

    return pipeline(
        name = "{}-integration-benchmarks".format(prefix),
        trigger = {
            "event": ["promote"],
            "target": ["gobenchmarks"],
        },
        environment = environment,
        services = services,
        volumes = volumes,
        steps = setup_steps + benchmark_steps,
    )

@ -2,15 +2,10 @@
load( load(
"scripts/drone/steps/lib.star", "scripts/drone/steps/lib.star",
"build_backend_step",
"build_docker_images_step",
"build_frontend_package_step", "build_frontend_package_step",
"build_frontend_step",
"build_plugins_step",
"build_storybook_step", "build_storybook_step",
"cloud_plugins_e2e_tests_step", "cloud_plugins_e2e_tests_step",
"compile_build_cmd", "compile_build_cmd",
"copy_packages_for_docker_step",
"download_grabpl_step", "download_grabpl_step",
"e2e_tests_artifacts", "e2e_tests_artifacts",
"e2e_tests_step", "e2e_tests_step",
@ -18,12 +13,12 @@ load(
"frontend_metrics_step", "frontend_metrics_step",
"grafana_server_step", "grafana_server_step",
"identify_runner_step", "identify_runner_step",
"package_step",
"publish_images_step", "publish_images_step",
"release_canary_npm_packages_step", "release_canary_npm_packages_step",
"store_storybook_step", "store_storybook_step",
"test_a11y_frontend_step", "test_a11y_frontend_step",
"trigger_oss", "trigger_oss",
"update_package_json_version",
"upload_cdn_step", "upload_cdn_step",
"upload_packages_step", "upload_packages_step",
"verify_gen_cue_step", "verify_gen_cue_step",
@ -31,6 +26,15 @@ load(
"wire_install_step", "wire_install_step",
"yarn_install_step", "yarn_install_step",
) )
load(
"scripts/drone/steps/rgm.star",
"rgm_build_docker_step",
"rgm_package_step",
)
load(
"scripts/drone/utils/images.star",
"images",
)
load( load(
"scripts/drone/utils/utils.star", "scripts/drone/utils/utils.star",
"pipeline", "pipeline",
@ -38,11 +42,11 @@ load(
# @unused # @unused
def build_e2e(trigger, ver_mode): def build_e2e(trigger, ver_mode):
"""Perform e2e building, testing, and publishing." """Perform e2e building, testing, and publishing.
Args: Args:
trigger: controls which events can trigger the pipeline execution. trigger: controls which events can trigger the pipeline execution.
ver_mode: used in the naming of the pipeline. ver_mode: used in the naming of the pipeline. Either 'pr' or 'main'.
Returns: Returns:
Drone pipeline. Drone pipeline.
@ -64,17 +68,19 @@ def build_e2e(trigger, ver_mode):
if ver_mode == "pr": if ver_mode == "pr":
build_steps.extend( build_steps.extend(
[ [
build_frontend_package_step(),
enterprise_downstream_step(ver_mode = ver_mode), enterprise_downstream_step(ver_mode = ver_mode),
], ],
) )
else:
build_steps.extend([
update_package_json_version(),
build_frontend_package_step(depends_on = ["update-package-json-version"]),
])
build_steps.extend( build_steps.extend(
[ [
build_backend_step(ver_mode = ver_mode), rgm_package_step(distros = "linux/amd64,linux/arm64", file = "packages.txt"),
build_frontend_step(ver_mode = ver_mode),
build_frontend_package_step(ver_mode = ver_mode),
build_plugins_step(ver_mode = ver_mode),
package_step(ver_mode = ver_mode),
grafana_server_step(), grafana_server_step(),
e2e_tests_step("dashboards-suite"), e2e_tests_step("dashboards-suite"),
e2e_tests_step("smoke-tests-suite"), e2e_tests_step("smoke-tests-suite"),
@ -87,7 +93,6 @@ def build_e2e(trigger, ver_mode):
), ),
e2e_tests_artifacts(), e2e_tests_artifacts(),
build_storybook_step(ver_mode = ver_mode), build_storybook_step(ver_mode = ver_mode),
copy_packages_for_docker_step(),
test_a11y_frontend_step(ver_mode = ver_mode), test_a11y_frontend_step(ver_mode = ver_mode),
], ],
) )
@ -97,12 +102,12 @@ def build_e2e(trigger, ver_mode):
[ [
store_storybook_step(trigger = trigger_oss, ver_mode = ver_mode), store_storybook_step(trigger = trigger_oss, ver_mode = ver_mode),
frontend_metrics_step(trigger = trigger_oss), frontend_metrics_step(trigger = trigger_oss),
build_docker_images_step( rgm_build_docker_step(
publish = False, "packages.txt",
), images["ubuntu"],
build_docker_images_step( images["alpine"],
publish = False, tag_format = "{{ .version_base }}-{{ .buildID }}-{{ .arch }}",
ubuntu = True, ubuntu_tag_format = "{{ .version_base }}-{{ .buildID }}-ubuntu-{{ .arch }}",
), ),
publish_images_step( publish_images_step(
docker_repo = "grafana", docker_repo = "grafana",
@ -128,16 +133,12 @@ def build_e2e(trigger, ver_mode):
elif ver_mode == "pr": elif ver_mode == "pr":
build_steps.extend( build_steps.extend(
[ [
build_docker_images_step( rgm_build_docker_step(
archs = [ "packages.txt",
"amd64", images["ubuntu"],
], images["alpine"],
), tag_format = "{{ .version_base }}-{{ .buildID }}-{{ .arch }}",
build_docker_images_step( ubuntu_tag_format = "{{ .version_base }}-{{ .buildID }}-ubuntu-{{ .arch }}",
archs = [
"amd64",
],
ubuntu = True,
), ),
publish_images_step( publish_images_step(
docker_repo = "grafana", docker_repo = "grafana",

@ -3,21 +3,21 @@ This module contains steps and pipelines relating to creating CI Docker images.
""" """
load( load(
"scripts/drone/utils/utils.star", "scripts/drone/utils/images.star",
"pipeline", "images",
) )
load( load(
"scripts/drone/vault.star", "scripts/drone/utils/utils.star",
"from_secret", "pipeline",
"gcp_download_build_container_assets_key",
) )
load( load(
"scripts/drone/utils/windows_images.star", "scripts/drone/utils/windows_images.star",
"windows_images", "windows_images",
) )
load( load(
"scripts/drone/utils/images.star", "scripts/drone/vault.star",
"images", "from_secret",
"gcp_download_build_container_assets_key",
) )
def publish_ci_windows_test_image_pipeline(): def publish_ci_windows_test_image_pipeline():
@ -32,7 +32,7 @@ def publish_ci_windows_test_image_pipeline():
steps = [ steps = [
{ {
"name": "clone", "name": "clone",
"image": windows_images["wix_image"], "image": windows_images["wix"],
"environment": { "environment": {
"GITHUB_TOKEN": from_secret("github_token"), "GITHUB_TOKEN": from_secret("github_token"),
}, },
@ -43,7 +43,7 @@ def publish_ci_windows_test_image_pipeline():
}, },
{ {
"name": "build-and-publish", "name": "build-and-publish",
"image": windows_images["windows_server_core_image"], "image": windows_images["windows_server_core"],
"environment": { "environment": {
"DOCKER_USERNAME": from_secret("docker_username"), "DOCKER_USERNAME": from_secret("docker_username"),
"DOCKER_PASSWORD": from_secret("docker_password"), "DOCKER_PASSWORD": from_secret("docker_password"),
@ -81,14 +81,14 @@ def publish_ci_build_container_image_pipeline():
steps = [ steps = [
{ {
"name": "validate-version", "name": "validate-version",
"image": images["alpine_image"], "image": images["alpine"],
"commands": [ "commands": [
"if [ -z \"${BUILD_CONTAINER_VERSION}\" ]; then echo Missing BUILD_CONTAINER_VERSION; false; fi", "if [ -z \"${BUILD_CONTAINER_VERSION}\" ]; then echo Missing BUILD_CONTAINER_VERSION; false; fi",
], ],
}, },
{ {
"name": "download-macos-sdk", "name": "download-macos-sdk",
"image": images["cloudsdk_image"], "image": images["cloudsdk"],
"environment": { "environment": {
"GCP_KEY": from_secret(gcp_download_build_container_assets_key), "GCP_KEY": from_secret(gcp_download_build_container_assets_key),
}, },
@ -100,7 +100,7 @@ def publish_ci_build_container_image_pipeline():
}, },
{ {
"name": "build-and-publish", # Consider splitting the build and the upload task. "name": "build-and-publish", # Consider splitting the build and the upload task.
"image": images["cloudsdk_image"], "image": images["cloudsdk"],
"volumes": [{"name": "docker", "path": "/var/run/docker.sock"}], "volumes": [{"name": "docker", "path": "/var/run/docker.sock"}],
"environment": { "environment": {
"DOCKER_USERNAME": from_secret("docker_username"), "DOCKER_USERNAME": from_secret("docker_username"),

@ -7,6 +7,7 @@ load(
"build_docs_website_step", "build_docs_website_step",
"codespell_step", "codespell_step",
"identify_runner_step", "identify_runner_step",
"verify_gen_cue_step",
"yarn_install_step", "yarn_install_step",
) )
load( load(
@ -35,6 +36,7 @@ def docs_pipelines(ver_mode, trigger):
codespell_step(), codespell_step(),
lint_docs(), lint_docs(),
build_docs_website_step(), build_docs_website_step(),
verify_gen_cue_step(),
] ]
return pipeline( return pipeline(
@ -48,7 +50,7 @@ def docs_pipelines(ver_mode, trigger):
def lint_docs(): def lint_docs():
return { return {
"name": "lint-docs", "name": "lint-docs",
"image": images["build_image"], "image": images["node"],
"depends_on": [ "depends_on": [
"yarn-install", "yarn-install",
], ],
@ -66,6 +68,9 @@ def trigger_docs_main():
"event": [ "event": [
"push", "push",
], ],
"repo": [
"grafana/grafana",
],
"paths": docs_paths, "paths": docs_paths,
} }
@ -74,5 +79,8 @@ def trigger_docs_pr():
"event": [ "event": [
"pull_request", "pull_request",
], ],
"repo": [
"grafana/grafana",
],
"paths": docs_paths, "paths": docs_paths,
} }

@ -2,29 +2,32 @@
This module returns the pipeline used for integration tests. This module returns the pipeline used for integration tests.
""" """
load(
"scripts/drone/services/services.star",
"integration_test_services",
"integration_test_services_volumes",
)
load( load(
"scripts/drone/steps/lib.star", "scripts/drone/steps/lib.star",
"compile_build_cmd", "compile_build_cmd",
"download_grabpl_step", "download_grabpl_step",
"enterprise_setup_step", "enterprise_setup_step",
"identify_runner_step", "identify_runner_step",
"mysql_integration_tests_step", "memcached_integration_tests_steps",
"postgres_integration_tests_step", "mysql_integration_tests_steps",
"postgres_integration_tests_steps",
"redis_integration_tests_steps",
"remote_alertmanager_integration_tests_steps",
"verify_gen_cue_step", "verify_gen_cue_step",
"verify_gen_jsonnet_step", "verify_gen_jsonnet_step",
"wire_install_step", "wire_install_step",
) )
load(
"scripts/drone/services/services.star",
"integration_test_services",
"integration_test_services_volumes",
)
load( load(
"scripts/drone/utils/utils.star", "scripts/drone/utils/utils.star",
"pipeline", "pipeline",
) )
def integration_tests(trigger, prefix, ver_mode): def integration_tests(trigger, prefix, ver_mode = "pr"):
"""Generate a pipeline for integration tests. """Generate a pipeline for integration tests.
Args: Args:
@ -37,11 +40,14 @@ def integration_tests(trigger, prefix, ver_mode):
""" """
environment = {"EDITION": "oss"} environment = {"EDITION": "oss"}
services = integration_test_services(edition = "oss") services = integration_test_services()
volumes = integration_test_services_volumes() volumes = integration_test_services_volumes()
init_steps = [] init_steps = []
verify_step = verify_gen_cue_step()
verify_jsonnet_step = verify_gen_jsonnet_step()
if ver_mode == "pr": if ver_mode == "pr":
# In pull requests, attempt to clone grafana enterprise. # In pull requests, attempt to clone grafana enterprise.
init_steps.append(enterprise_setup_step()) init_steps.append(enterprise_setup_step())
@ -50,15 +56,18 @@ def integration_tests(trigger, prefix, ver_mode):
download_grabpl_step(), download_grabpl_step(),
compile_build_cmd(), compile_build_cmd(),
identify_runner_step(), identify_runner_step(),
verify_gen_cue_step(), verify_step,
verify_gen_jsonnet_step(), verify_jsonnet_step,
wire_install_step(), wire_install_step(),
] ]
test_steps = [ # test_steps = [a, b] + [c, d] + [e, f]...
postgres_integration_tests_step(), test_steps = postgres_integration_tests_steps() + \
mysql_integration_tests_step(), mysql_integration_tests_steps("mysql57", "5.7") + \
] mysql_integration_tests_steps("mysql80", "8.0") + \
redis_integration_tests_steps() + \
memcached_integration_tests_steps() + \
remote_alertmanager_integration_tests_steps()
return pipeline( return pipeline(
name = "{}-integration-tests".format(prefix), name = "{}-integration-tests".format(prefix),

@ -44,6 +44,10 @@ def lint_backend_pipeline(trigger, ver_mode):
test_steps = [ test_steps = [
lint_backend_step(), lint_backend_step(),
# modowners doesn't exist for versions below 10.1.x.
# validate_modfile_step(),
# OpenAPI spec validation is disabled for versions below 10.2.x.
# validate_openapi_spec_step(),
] ]
if ver_mode == "main": if ver_mode == "main":

@ -27,6 +27,9 @@ def lint_frontend_pipeline(trigger, ver_mode):
environment = {"EDITION": "oss"} environment = {"EDITION": "oss"}
init_steps = [] init_steps = []
lint_step = lint_frontend_step()
# i18n step is disabled for versions below 10.1.x.
# i18n_step = verify_i18n_step()
if ver_mode == "pr": if ver_mode == "pr":
# In pull requests, attempt to clone grafana enterprise. # In pull requests, attempt to clone grafana enterprise.
@ -36,9 +39,10 @@ def lint_frontend_pipeline(trigger, ver_mode):
identify_runner_step(), identify_runner_step(),
yarn_install_step(), yarn_install_step(),
] ]
test_steps = [ test_steps = [
lint_frontend_step(), lint_step,
# i18n step is disabled for versions below 10.1.x.
# i18n_step,
] ]
return pipeline( return pipeline(

@ -3,14 +3,14 @@ This module returns a Drone step and pipeline for linting with shellcheck.
""" """
load("scripts/drone/steps/lib.star", "compile_build_cmd") load("scripts/drone/steps/lib.star", "compile_build_cmd")
load(
"scripts/drone/utils/utils.star",
"pipeline",
)
load( load(
"scripts/drone/utils/images.star", "scripts/drone/utils/images.star",
"images", "images",
) )
load(
"scripts/drone/utils/utils.star",
"pipeline",
)
trigger = { trigger = {
"event": [ "event": [
@ -29,12 +29,10 @@ trigger = {
def shellcheck_step(): def shellcheck_step():
return { return {
"name": "shellcheck", "name": "shellcheck",
"image": images["build_image"], "image": images["ubuntu"],
"depends_on": [
"compile-build-cmd",
],
"commands": [ "commands": [
"./bin/build shellcheck", "apt-get update -yq && apt-get install shellcheck",
"shellcheck -e SC1071 -e SC2162 scripts/**/*.sh",
], ],
} }

@ -2,13 +2,8 @@
This module returns the pipeline used for testing backend code. This module returns the pipeline used for testing backend code.
""" """
load(
"scripts/drone/utils/utils.star",
"pipeline",
)
load( load(
"scripts/drone/steps/lib.star", "scripts/drone/steps/lib.star",
"compile_build_cmd",
"enterprise_setup_step", "enterprise_setup_step",
"identify_runner_step", "identify_runner_step",
"test_backend_integration_step", "test_backend_integration_step",
@ -17,6 +12,10 @@ load(
"verify_gen_jsonnet_step", "verify_gen_jsonnet_step",
"wire_install_step", "wire_install_step",
) )
load(
"scripts/drone/utils/utils.star",
"pipeline",
)
def test_backend(trigger, ver_mode): def test_backend(trigger, ver_mode):
"""Generates the pipeline used for testing OSS backend code. """Generates the pipeline used for testing OSS backend code.
@ -32,15 +31,17 @@ def test_backend(trigger, ver_mode):
steps = [] steps = []
verify_step = verify_gen_cue_step()
verify_jsonnet_step = verify_gen_jsonnet_step()
if ver_mode == "pr": if ver_mode == "pr":
# In pull requests, attempt to clone grafana enterprise. # In pull requests, attempt to clone grafana enterprise.
steps.append(enterprise_setup_step()) steps.append(enterprise_setup_step())
steps += [ steps += [
identify_runner_step(), identify_runner_step(),
compile_build_cmd(), verify_step,
verify_gen_cue_step(), verify_jsonnet_step,
verify_gen_jsonnet_step(),
wire_install_step(), wire_install_step(),
test_backend_step(), test_backend_step(),
test_backend_integration_step(), test_backend_integration_step(),

@ -2,19 +2,18 @@
This module returns the pipeline used for testing backend code. This module returns the pipeline used for testing backend code.
""" """
load(
"scripts/drone/utils/utils.star",
"pipeline",
)
load( load(
"scripts/drone/steps/lib.star", "scripts/drone/steps/lib.star",
"betterer_frontend_step", "betterer_frontend_step",
"download_grabpl_step",
"enterprise_setup_step", "enterprise_setup_step",
"identify_runner_step", "identify_runner_step",
"test_frontend_step", "test_frontend_step",
"yarn_install_step", "yarn_install_step",
) )
load(
"scripts/drone/utils/utils.star",
"pipeline",
)
def test_frontend(trigger, ver_mode): def test_frontend(trigger, ver_mode):
"""Generates the pipeline used for testing frontend code. """Generates the pipeline used for testing frontend code.
@ -30,7 +29,6 @@ def test_frontend(trigger, ver_mode):
steps = [ steps = [
identify_runner_step(), identify_runner_step(),
download_grabpl_step(),
yarn_install_step(), yarn_install_step(),
betterer_frontend_step(), betterer_frontend_step(),
] ]

@ -2,14 +2,14 @@
This module contains logic for checking if the package.json whats new url matches with the in-flight tag. This module contains logic for checking if the package.json whats new url matches with the in-flight tag.
""" """
load(
"scripts/drone/utils/images.star",
"images",
)
load( load(
"scripts/drone/steps/lib.star", "scripts/drone/steps/lib.star",
"compile_build_cmd", "compile_build_cmd",
) )
load(
"scripts/drone/utils/images.star",
"images",
)
load( load(
"scripts/drone/utils/utils.star", "scripts/drone/utils/utils.star",
"pipeline", "pipeline",
@ -18,7 +18,7 @@ load(
def whats_new_checker_step(): def whats_new_checker_step():
return { return {
"name": "whats-new-checker", "name": "whats-new-checker",
"image": images["go_image"], "image": images["go"],
"depends_on": [ "depends_on": [
"compile-build-cmd", "compile-build-cmd",
], ],

@ -3,15 +3,15 @@ This module returns the pipeline used for building Grafana on Windows.
""" """
load( load(
"scripts/drone/utils/utils.star", "scripts/drone/steps/lib_windows.star",
"pipeline", "clone_step_windows",
"get_windows_steps",
"test_backend_step_windows",
"wire_install_step_windows",
) )
load( load(
"scripts/drone/steps/lib.star", "scripts/drone/utils/utils.star",
"get_windows_steps", "pipeline",
"windows_clone_step",
"windows_test_backend_step",
"windows_wire_install_step",
) )
load( load(
"scripts/drone/utils/windows_images.star", "scripts/drone/utils/windows_images.star",
@ -30,19 +30,19 @@ def windows_test_backend(trigger, edition, ver_mode):
""" """
environment = {"EDITION": edition} environment = {"EDITION": edition}
steps = [ steps = [
windows_clone_step(), clone_step_windows(),
] ]
steps.extend([{ steps.extend([{
"name": "windows-init", "name": "windows-init",
"image": windows_images["windows_go_image"], "image": windows_images["go"],
"depends_on": ["clone"], "depends_on": ["clone"],
"commands": [], "commands": [],
}]) }])
steps.extend([ steps.extend([
windows_wire_install_step(edition), wire_install_step_windows(edition),
windows_test_backend_step(), test_backend_step_windows(),
]) ])
pl = pipeline( pl = pipeline(
name = "{}-test-backend-windows".format(ver_mode), name = "{}-test-backend-windows".format(ver_mode),

@ -4,50 +4,52 @@ rgm uses 'github.com/grafana/grafana-build' to build Grafana on the following ev
* A tag that begins with a 'v' * A tag that begins with a 'v'
""" """
load(
"scripts/drone/steps/lib.star",
"get_windows_steps",
)
load(
"scripts/drone/utils/utils.star",
"ignore_failure",
"pipeline",
)
load( load(
"scripts/drone/events/release.star", "scripts/drone/events/release.star",
"verify_release_pipeline", "verify_release_pipeline",
) )
load(
"scripts/drone/pipelines/test_frontend.star",
"test_frontend",
)
load( load(
"scripts/drone/pipelines/test_backend.star", "scripts/drone/pipelines/test_backend.star",
"test_backend", "test_backend",
) )
load(
"scripts/drone/pipelines/test_frontend.star",
"test_frontend",
)
load( load(
"scripts/drone/pipelines/whats_new_checker.star", "scripts/drone/pipelines/whats_new_checker.star",
"whats_new_checker_pipeline", "whats_new_checker_pipeline",
) )
load(
"scripts/drone/steps/lib_windows.star",
"get_windows_steps",
)
load(
"scripts/drone/utils/images.star",
"images",
)
load(
"scripts/drone/utils/utils.star",
"pipeline",
"with_deps",
)
load(
"scripts/drone/variables.star",
"golang_version",
)
load( load(
"scripts/drone/vault.star", "scripts/drone/vault.star",
"from_secret", "from_secret",
"npm_token",
"rgm_cdn_destination",
"rgm_dagger_token", "rgm_dagger_token",
"rgm_destination", "rgm_destination",
"rgm_downloads_destination",
"rgm_gcp_key_base64", "rgm_gcp_key_base64",
"rgm_github_token", "rgm_github_token",
"rgm_storybook_destination",
) )
rgm_env_secrets = {
"GCP_KEY_BASE64": from_secret(rgm_gcp_key_base64),
"DESTINATION": from_secret(rgm_destination),
"GITHUB_TOKEN": from_secret(rgm_github_token),
"_EXPERIMENTAL_DAGGER_CLOUD_TOKEN": from_secret(rgm_dagger_token),
"GPG_PRIVATE_KEY": from_secret("packages_gpg_private_key"),
"GPG_PUBLIC_KEY": from_secret("packages_gpg_public_key"),
"GPG_PASSPHRASE": from_secret("packages_gpg_passphrase"),
}
docs_paths = { docs_paths = {
"exclude": [ "exclude": [
"*.md", "*.md",
@ -73,75 +75,233 @@ tag_trigger = {
}, },
} }
def rgm_build(script = "drone_publish_main.sh", canFail = True): main_trigger = {
rgm_build_step = { "event": [
"name": "rgm-build", "push",
],
"branch": "main",
"paths": docs_paths,
"repo": [
"grafana/grafana",
],
}
nightly_trigger = {
"event": {
"include": [
"cron",
],
},
"cron": {
"include": [
"nightly-release",
],
},
}
version_branch_trigger = {"ref": ["refs/heads/v[0-9]*"]}
def rgm_env_secrets(env):
"""Adds the rgm secret ENV variables to the given env arg
Args:
env: A map of environment varables. This function will adds the necessary secrets to it (and potentially overwrite them).
Returns:
Drone step.
"""
env["DESTINATION"] = from_secret(rgm_destination)
env["STORYBOOK_DESTINATION"] = from_secret(rgm_storybook_destination)
env["CDN_DESTINATION"] = from_secret(rgm_cdn_destination)
env["DOWNLOADS_DESTINATION"] = from_secret(rgm_downloads_destination)
env["GCP_KEY_BASE64"] = from_secret(rgm_gcp_key_base64)
env["GITHUB_TOKEN"] = from_secret(rgm_github_token)
env["_EXPERIMENTAL_DAGGER_CLOUD_TOKEN"] = from_secret(rgm_dagger_token)
env["GPG_PRIVATE_KEY"] = from_secret("packages_gpg_private_key")
env["GPG_PUBLIC_KEY"] = from_secret("packages_gpg_public_key")
env["GPG_PASSPHRASE"] = from_secret("packages_gpg_passphrase")
env["DOCKER_USERNAME"] = from_secret("docker_username")
env["DOCKER_PASSWORD"] = from_secret("docker_password")
env["NPM_TOKEN"] = from_secret(npm_token)
env["GCOM_API_KEY"] = from_secret("grafana_api_key")
return env
def rgm_run(name, script):
"""Returns a pipeline that does a full build & package of Grafana.
Args:
name: The name of the pipeline step.
script: The script in the container to run.
Returns:
Drone step.
"""
env = {
"GO_VERSION": golang_version,
}
rgm_run_step = {
"name": name,
"image": "grafana/grafana-build:main", "image": "grafana/grafana-build:main",
"pull": "always",
"commands": [ "commands": [
"export GRAFANA_DIR=$$(pwd)", "export GRAFANA_DIR=$$(pwd)",
"cd /src && ./scripts/{}".format(script), "cd /src && ./scripts/{}".format(script),
], ],
"environment": rgm_env_secrets, "environment": rgm_env_secrets(env),
# The docker socket is a requirement for running dagger programs # The docker socket is a requirement for running dagger programs
# In the future we should find a way to use dagger without mounting the docker socket. # In the future we should find a way to use dagger without mounting the docker socket.
"volumes": [{"name": "docker", "path": "/var/run/docker.sock"}], "volumes": [{"name": "docker", "path": "/var/run/docker.sock"}],
} }
if canFail:
rgm_build_step["failure"] = "ignore"
return [ return [
rgm_build_step, rgm_run_step,
] ]
def rgm_main(): def rgm_copy(src, dst):
trigger = { """Copies file from/to GCS.
"event": [
"push", Args:
], src: source of the files.
"branch": "main", dst: destination of the files.
"paths": docs_paths,
"repo": [ Returns:
"grafana/grafana", Drone steps.
], """
commands = [
"printenv GCP_KEY_BASE64 | base64 -d > /tmp/key.json",
"gcloud auth activate-service-account --key-file=/tmp/key.json",
"gcloud storage cp -r {} {}".format(src, dst),
]
if not dst.startswith("gs://"):
commands.insert(0, "mkdir -p {}".format(dst))
rgm_copy_step = {
"name": "rgm-copy",
"image": "google/cloud-sdk:alpine",
"commands": commands,
"environment": rgm_env_secrets({}),
} }
return [
rgm_copy_step,
]
def rgm_publish_packages(bucket = "grafana-packages"):
"""Publish deb and rpm packages.
Args:
bucket: target bucket to publish the packages.
Returns:
Drone steps.
"""
steps = []
for package_manager in ["deb", "rpm"]:
steps.append({
"name": "publish-{}".format(package_manager),
# See https://github.com/grafana/deployment_tools/blob/master/docker/package-publish/README.md for docs on that image
"image": images["package_publish"],
"privileged": True,
"settings": {
"access_key_id": from_secret("packages_access_key_id"),
"secret_access_key": from_secret("packages_secret_access_key"),
"service_account_json": from_secret("packages_service_account"),
"target_bucket": bucket,
"gpg_passphrase": from_secret("packages_gpg_passphrase"),
"gpg_public_key": from_secret("packages_gpg_public_key"),
"gpg_private_key": from_secret("packages_gpg_private_key"),
"package_path": "file:///drone/src/dist/*.{}".format(package_manager),
},
})
return steps
def rgm_main():
# Runs a package / build process (with some distros) when commits are merged to main
return pipeline( return pipeline(
name = "rgm-main-prerelease", name = "rgm-main-prerelease",
trigger = trigger, trigger = main_trigger,
steps = rgm_build(canFail = True), steps = rgm_run("rgm-build", "drone_publish_main.sh"),
depends_on = ["main-test-backend", "main-test-frontend"], depends_on = ["main-test-backend", "main-test-frontend"],
) )
def rgm_tag(): def rgm_tag():
# Runs a package / build process (with all distros) when a tag is made
return pipeline( return pipeline(
name = "rgm-tag-prerelease", name = "rgm-tag-prerelease",
trigger = tag_trigger, trigger = tag_trigger,
steps = rgm_build(script = "drone_publish_tag_grafana.sh", canFail = False), steps = rgm_run("rgm-build", "drone_publish_tag_grafana.sh"),
depends_on = ["release-test-backend", "release-test-frontend"], depends_on = ["release-test-backend", "release-test-frontend"],
) )
def rgm_windows(): def rgm_tag_windows():
return pipeline( return pipeline(
name = "rgm-tag-prerelease-windows", name = "rgm-tag-prerelease-windows",
trigger = tag_trigger, trigger = tag_trigger,
steps = ignore_failure( steps = get_windows_steps(
get_windows_steps(
ver_mode = "release", ver_mode = "release",
bucket = "grafana-prerelease", bucket = "grafana-prerelease",
), ),
),
depends_on = ["rgm-tag-prerelease"], depends_on = ["rgm-tag-prerelease"],
platform = "windows", platform = "windows",
) )
def rgm(): def rgm_version_branch():
# Runs a package / build proces (with all distros) when a commit lands on a version branch
return pipeline(
name = "rgm-version-branch-prerelease",
trigger = version_branch_trigger,
steps = rgm_run("rgm-build", "drone_publish_tag_grafana.sh"),
depends_on = ["release-test-backend", "release-test-frontend"],
)
def rgm_nightly_build():
src = "$${DRONE_WORKSPACE}/dist/*"
dst = "$${DESTINATION}/$${DRONE_BUILD_EVENT}"
copy_steps = with_deps(rgm_copy(src, dst), ["rgm-build"])
return pipeline(
name = "rgm-nightly-build",
trigger = nightly_trigger,
steps = rgm_run("rgm-build", "drone_build_nightly_grafana.sh") + copy_steps,
depends_on = ["nightly-test-backend", "nightly-test-frontend"],
)
def rgm_nightly_publish():
"""Nightly publish pipeline.
Returns:
Drone pipeline.
"""
src = "$${DESTINATION}/$${DRONE_BUILD_EVENT}/*_$${DRONE_BUILD_NUMBER}_*"
dst = "$${DRONE_WORKSPACE}/dist"
publish_steps = with_deps(rgm_run("rgm-publish", "drone_publish_nightly_grafana.sh"), ["rgm-copy"])
package_steps = with_deps(rgm_publish_packages(), ["rgm-publish"])
return pipeline(
name = "rgm-nightly-publish",
trigger = nightly_trigger,
steps = rgm_copy(src, dst) + publish_steps + package_steps,
depends_on = ["rgm-nightly-build"],
)
def rgm_nightly_pipeline():
return [
test_frontend(nightly_trigger, "nightly"),
test_backend(nightly_trigger, "nightly"),
rgm_nightly_build(),
rgm_nightly_publish(),
]
def rgm_tag_pipeline():
return [ return [
whats_new_checker_pipeline(tag_trigger), whats_new_checker_pipeline(tag_trigger),
test_frontend(tag_trigger, "release"), test_frontend(tag_trigger, "release"),
test_backend(tag_trigger, "release"), test_backend(tag_trigger, "release"),
rgm_main(),
rgm_tag(), rgm_tag(),
rgm_windows(), rgm_tag_windows(),
verify_release_pipeline( verify_release_pipeline(
trigger = tag_trigger, trigger = tag_trigger,
name = "rgm-tag-verify-prerelease-assets", name = "rgm-tag-verify-prerelease-assets",
@ -152,3 +312,29 @@ def rgm():
], ],
), ),
] ]
def rgm_version_branch_pipeline():
    """Returns the pipelines triggered by commits to version branches."""
    verify = verify_release_pipeline(
        trigger = version_branch_trigger,
        name = "rgm-prerelease-verify-prerelease-assets",
        bucket = "grafana-prerelease",
        depends_on = [
            "rgm-version-branch-prerelease",
        ],
    )
    return [rgm_version_branch(), verify]
def rgm_main_pipeline():
    """Returns the pipelines triggered by commits to the main branch."""
    return [rgm_main()]
def rgm():
    """Aggregates every RGM pipeline: main, tag, version-branch and nightly."""
    pipelines = []
    pipelines += rgm_main_pipeline()
    pipelines += rgm_tag_pipeline()
    pipelines += rgm_version_branch_pipeline()
    pipelines += rgm_nightly_pipeline()
    return pipelines

@ -10,14 +10,15 @@ load(
def integration_test_services_volumes(): def integration_test_services_volumes():
return [ return [
{"name": "postgres", "temp": {"medium": "memory"}}, {"name": "postgres", "temp": {"medium": "memory"}},
{"name": "mysql", "temp": {"medium": "memory"}}, {"name": "mysql57", "temp": {"medium": "memory"}},
{"name": "mysql80", "temp": {"medium": "memory"}},
] ]
def integration_test_services(edition): def integration_test_services():
services = [ services = [
{ {
"name": "postgres", "name": "postgres",
"image": images["postgres_alpine_image"], "image": images["postgres_alpine"],
"environment": { "environment": {
"POSTGRES_USER": "grafanatest", "POSTGRES_USER": "grafanatest",
"POSTGRES_PASSWORD": "grafanatest", "POSTGRES_PASSWORD": "grafanatest",
@ -29,40 +30,53 @@ def integration_test_services(edition):
], ],
}, },
{ {
"name": "mysql", "name": "mysql57",
"image": images["mysql5_image"], "image": images["mysql5"],
"environment": { "environment": {
"MYSQL_ROOT_PASSWORD": "rootpass", "MYSQL_ROOT_PASSWORD": "rootpass",
"MYSQL_DATABASE": "grafana_tests", "MYSQL_DATABASE": "grafana_tests",
"MYSQL_USER": "grafana", "MYSQL_USER": "grafana",
"MYSQL_PASSWORD": "password", "MYSQL_PASSWORD": "password",
}, },
"volumes": [{"name": "mysql", "path": "/var/lib/mysql"}], "volumes": [{"name": "mysql57", "path": "/var/lib/mysql"}],
"commands": ["docker-entrypoint.sh mysqld --character-set-server=utf8mb4 --collation-server=utf8mb4_unicode_ci"],
},
{
"name": "mysql80",
"image": images["mysql8"],
"environment": {
"MYSQL_ROOT_PASSWORD": "rootpass",
"MYSQL_DATABASE": "grafana_tests",
"MYSQL_USER": "grafana",
"MYSQL_PASSWORD": "password",
},
"volumes": [{"name": "mysql80", "path": "/var/lib/mysql"}],
"commands": ["docker-entrypoint.sh mysqld --default-authentication-plugin=mysql_native_password"],
},
{
"name": "mimir_backend",
"image": images["mimir"],
"environment": {},
"commands": ["/bin/mimir -target=backend"],
}, },
]
if edition in ("enterprise", "enterprise2"):
services.extend(
[
{ {
"name": "redis", "name": "redis",
"image": "redis:6.2.1-alpine", "image": images["redis_alpine"],
"environment": {}, "environment": {},
}, },
{ {
"name": "memcached", "name": "memcached",
"image": "memcached:1.6.9-alpine", "image": images["memcached_alpine"],
"environment": {}, "environment": {},
}, },
], ]
)
return services return services
def ldap_service(): def ldap_service():
return { return {
"name": "ldap", "name": "ldap",
"image": images["openldap_image"], "image": images["openldap"],
"environment": { "environment": {
"LDAP_ADMIN_PASSWORD": "grafana", "LDAP_ADMIN_PASSWORD": "grafana",
"LDAP_DOMAIN": "grafana.org", "LDAP_DOMAIN": "grafana.org",

File diff suppressed because it is too large Load Diff

@ -0,0 +1,187 @@
"""
This module is a library of Drone steps that exclusively run on windows machines.
"""
load(
"scripts/drone/utils/windows_images.star",
"windows_images",
)
load(
"scripts/drone/variables.star",
"grabpl_version",
)
load(
"scripts/drone/vault.star",
"from_secret",
"gcp_grafanauploads_base64",
"prerelease_bucket",
)
def identify_runner_step_windows():
    """Returns a step that prints the name of the Drone runner executing the build."""
    runner_echo = [
        "echo $env:DRONE_RUNNER_NAME",
    ]
    return {
        "name": "identify-runner",
        "image": windows_images["1809"],
        "commands": runner_echo,
    }
def get_windows_steps(ver_mode, bucket = "%PRERELEASE_BUCKET%"):
    """Generate the list of Windows steps.

    Args:
      ver_mode: used to differentiate steps for different version modes.
      bucket: used to override prerelease bucket.

    Returns:
      List of Drone steps.
    """
    steps = [
        identify_runner_step_windows(),
    ]

    # Download grabpl.exe (the Grafana build-pipeline tool), pinned to grabpl_version.
    init_cmds = [
        '$$ProgressPreference = "SilentlyContinue"',
        "Invoke-WebRequest https://grafana-downloads.storage.googleapis.com/grafana-build-pipeline/{}/windows/grabpl.exe -OutFile grabpl.exe".format(
            grabpl_version,
        ),
    ]

    steps.extend(
        [
            {
                "name": "windows-init",
                "image": windows_images["wix"],
                "commands": init_cmds,
            },
        ],
    )

    # The Windows installer is only built for release and release-branch builds.
    if ver_mode in (
        "release",
        "release-branch",
    ):
        gcp_bucket = "{}/artifacts/downloads".format(bucket)
        if ver_mode == "release":
            # Tagged release: artifacts are addressed by the tag inside the prerelease bucket.
            ver_part = "${DRONE_TAG}"
            dir = "release"
        else:
            # release-branch: artifacts are addressed by build number in grafana-downloads.
            dir = "main"
            gcp_bucket = "grafana-downloads"
            build_no = "DRONE_BUILD_NUMBER"
            ver_part = "--build-id $$env:{}".format(build_no)
        installer_commands = [
            "$$gcpKey = $$env:GCP_KEY",
            "[System.Text.Encoding]::UTF8.GetString([System.Convert]::FromBase64String($$gcpKey)) > gcpkey.json",
            # gcloud fails to read the file unless converted with dos2unix
            "dos2unix gcpkey.json",
            "gcloud auth activate-service-account --key-file=gcpkey.json",
            "rm gcpkey.json",
            "cp C:\\App\\nssm-2.24.zip .",
        ]
        if ver_mode in ("release",):
            # Build the MSI from the uploaded zip; $$fname captures the resulting
            # .msi filename for the upload commands added below.
            version = "${DRONE_TAG:1}"
            installer_commands.extend(
                [
                    ".\\grabpl.exe windows-installer --target {} --edition oss {}".format(
                        "gs://{}/{}/oss/{}/grafana-{}.windows-amd64.zip".format(gcp_bucket, ver_part, ver_mode, version),
                        ver_part,
                    ),
                    '$$fname = ((Get-Childitem grafana*.msi -name) -split "`n")[0]',
                ],
            )
        # NOTE(review): inside this outer branch ver_mode can only be "release"
        # or "release-branch", so the "main" case below looks unreachable and
        # the else arm always runs for "release-branch" — where $$fname is never
        # assigned (only the "release" branch above sets it). Confirm intent.
        if ver_mode == "main":
            installer_commands.extend(
                [
                    "gsutil cp $$fname gs://{}/oss/{}/".format(gcp_bucket, dir),
                    'gsutil cp "$$fname.sha256" gs://{}/oss/{}/'.format(
                        gcp_bucket,
                        dir,
                    ),
                ],
            )
        else:
            installer_commands.extend(
                [
                    "gsutil cp $$fname gs://{}/{}/oss/{}/".format(
                        gcp_bucket,
                        ver_part,
                        dir,
                    ),
                    'gsutil cp "$$fname.sha256" gs://{}/{}/oss/{}/'.format(
                        gcp_bucket,
                        ver_part,
                        dir,
                    ),
                ],
            )
        steps.append(
            {
                "name": "build-windows-installer",
                "image": windows_images["wix"],
                "depends_on": [
                    "windows-init",
                ],
                "environment": {
                    # Base64-encoded GCP service account used by gcloud auth / gsutil.
                    "GCP_KEY": from_secret(gcp_grafanauploads_base64),
                    "PRERELEASE_BUCKET": from_secret(prerelease_bucket),
                    "GITHUB_TOKEN": from_secret("github_token"),
                },
                "commands": installer_commands,
            },
        )
    return steps
def download_grabpl_step_windows():
    """Returns a step that downloads grabpl.exe pinned to grabpl_version."""
    grabpl_url = "https://grafana-downloads.storage.googleapis.com/grafana-build-pipeline/{}/windows/grabpl.exe".format(grabpl_version)
    return {
        "name": "grabpl",
        "image": windows_images["wix"],
        "commands": [
            '$$ProgressPreference = "SilentlyContinue"',
            "Invoke-WebRequest {} -OutFile grabpl.exe".format(grabpl_url),
        ],
    }
def test_backend_step_windows():
    """Runs the Go backend unit tests on a Windows runner."""

    # TODO: This is mostly a duplicate of "test_backend_step" in lib.star; but this file can't import that one,
    # otherwise it creates an import cycle.
    go_test_cmd = "go test -tags requires_buildifer -short -covermode=atomic -timeout=5m ./pkg/..."
    return {
        "name": "test-backend",
        "image": windows_images["go"],
        "depends_on": ["wire-install"],
        "commands": [go_test_cmd],
    }
def clone_step_windows():
    """Clones the repository via a GitHub token and checks out the build commit."""
    clone_cmds = [
        'git clone "https://$$env:GITHUB_TOKEN@github.com/$$env:DRONE_REPO.git" .',
        "git checkout -f $$env:DRONE_COMMIT",
    ]
    return {
        "name": "clone",
        "image": windows_images["wix"],
        "environment": {"GITHUB_TOKEN": from_secret("github_token")},
        "commands": clone_cmds,
    }
def wire_install_step_windows(edition):
    """Installs google/wire and generates DI wiring for the given edition tag."""
    wire_cmds = [
        "go install github.com/google/wire/cmd/wire@v0.5.0",
        "wire gen -tags {} ./pkg/server".format(edition),
    ]
    return {
        "name": "wire-install",
        "image": windows_images["go"],
        "commands": wire_cmds,
        "depends_on": [
            "windows-init",
        ],
    }

@ -0,0 +1,61 @@
"""
Individual steps that use 'grafana-build' to replace existing individual steps.
These aren't used in releases.
"""
load(
"scripts/drone/variables.star",
"golang_version",
)
# rgm_package_step will create a tar.gz for use in e2e tests or other PR testing related activities..
def rgm_package_step(distros = "linux/amd64,linux/arm64", file = "packages.txt"):
    """Packages Grafana for the given distros, writing the package list to 'file'."""
    package_cmd = (
        "/src/grafana-build package --distro={} ".format(distros) +
        "--go-version={} ".format(golang_version) +
        "--yarn-cache=$$YARN_CACHE_FOLDER " +
        "--build-id=$$DRONE_BUILD_NUMBER " +
        "--grafana-dir=$$PWD > {}".format(file)
    )
    return {
        "name": "rgm-package",
        "image": "grafana/grafana-build:main",
        "pull": "always",
        "depends_on": ["yarn-install"],
        "commands": [package_cmd],
        "volumes": [{"name": "docker", "path": "/var/run/docker.sock"}],
    }
# rgm_build_backend will create compile the grafana backend for various platforms. It's preferred to use
# 'rgm_package_step' if you creating a "usable" artifact. This should really only be used to verify that the code is
# compilable.
def rgm_build_backend_step(distros = "linux/amd64,linux/arm64"):
    # NOTE(review): this step is named "rgm-package", the same as the step
    # returned by rgm_package_step above. If both ever appear in one pipeline
    # the names collide, and depends_on = ["rgm-package"] elsewhere could
    # resolve to either. Presumably meant to be "rgm-build-backend" — confirm
    # against callers before renaming.
    return {
        "name": "rgm-package",
        "image": "grafana/grafana-build:main",
        "pull": "always",
        "commands": [
            "/src/grafana-build build " +
            "--go-version={} ".format(golang_version) +
            "--distro={} --grafana-dir=$$PWD".format(distros),
        ],
        "volumes": [{"name": "docker", "path": "/var/run/docker.sock"}],
    }
def rgm_build_docker_step(packages, ubuntu, alpine, depends_on = ["rgm-package"], file = "docker.txt", tag_format = "{{ .version }}-{{ .arch }}", ubuntu_tag_format = "{{ .version }}-ubuntu-{{ .arch }}"):
    """Builds Grafana docker images from the tar.gz packages listed in 'packages' and loads them into the daemon."""

    # Select the non-docker, non-checksum tarballs and turn each into a --package flag.
    package_args = "$(cat {} | grep tar.gz | grep -v docker | grep -v sha256 | awk '{{print \"--package=\" $0}}') ".format(packages)
    docker_cmd = "/src/grafana-build docker " + package_args
    docker_cmd += "--ubuntu-base={} ".format(ubuntu)
    docker_cmd += "--alpine-base={} ".format(alpine)
    docker_cmd += "--tag-format='{}' ".format(tag_format)
    docker_cmd += "--ubuntu-tag-format='{}' > {}".format(ubuntu_tag_format, file)
    return {
        "name": "rgm-build-docker",
        "image": "grafana/grafana-build:main",
        "pull": "always",
        "commands": [
            # Register binfmt handlers so cross-arch images can be built.
            "docker run --privileged --rm tonistiigi/binfmt --install all",
            docker_cmd,
            "find ./dist -name '*docker*.tar.gz' -type f | xargs -n1 docker load -i",
        ],
        "volumes": [{"name": "docker", "path": "/var/run/docker.sock"}],
        "depends_on": depends_on,
    }

@ -2,24 +2,35 @@
This module contains all the docker images that are used to build test and publish Grafana. This module contains all the docker images that are used to build test and publish Grafana.
""" """
load(
"scripts/drone/variables.star",
"golang_version",
"nodejs_version",
)
images = { images = {
"cloudsdk_image": "google/cloud-sdk:431.0.0", "git": "alpine/git:2.40.1",
"build_image": "grafana/build-container:1.7.4", "go": "golang:{}-alpine".format(golang_version),
"publish_image": "grafana/grafana-ci-deploy:1.3.3", "node": "node:{}-alpine".format(nodejs_version),
"alpine_image": "alpine:3.18.3", "cloudsdk": "google/cloud-sdk:431.0.0",
"curl_image": "byrnedo/alpine-curl:0.1.8", "publish": "grafana/grafana-ci-deploy:1.3.3",
"go_image": "golang:1.20.10", "alpine": "alpine:3.18.3",
"plugins_slack_image": "plugins/slack", "ubuntu": "ubuntu:22.04",
"postgres_alpine_image": "postgres:12.3-alpine", "curl": "byrnedo/alpine-curl:0.1.8",
"mysql5_image": "mysql:5.7.39", "plugins_slack": "plugins/slack",
"mysql8_image": "mysql:8.0.32", "python": "python:3.8",
"redis_alpine_image": "redis:6.2.11-alpine", "postgres_alpine": "postgres:12.3-alpine",
"memcached_alpine_image": "memcached:1.6.9-alpine", "mimir": "grafana/mimir:latest",
"package_publish_image": "us.gcr.io/kubernetes-dev/package-publish:latest", "mysql5": "mysql:5.7.39",
"openldap_image": "osixia/openldap:1.4.0", "mysql8": "mysql:8.0.32",
"drone_downstream_image": "grafana/drone-downstream", "redis_alpine": "redis:6.2.11-alpine",
"docker_puppeteer_image": "grafana/docker-puppeteer:1.1.0", "memcached_alpine": "memcached:1.6.9-alpine",
"docs_image": "grafana/docs-base:dbd975af06", "package_publish": "us.gcr.io/kubernetes-dev/package-publish:latest",
"cypress_image": "cypress/included:9.5.1-node16.14.0-slim-chrome99-ff97", "openldap": "osixia/openldap:1.4.0",
"cloud_datasources_e2e_image": "us-docker.pkg.dev/grafanalabs-dev/cloud-data-sources/e2e:latest", "drone_downstream": "grafana/drone-downstream",
"docker_puppeteer": "grafana/docker-puppeteer:1.1.0",
"docs": "grafana/docs-base:dbd975af06",
"cypress": "cypress/included:9.5.1-node16.14.0-slim-chrome99-ff97",
"dockerize": "jwilder/dockerize:0.6.1",
"shellcheck": "koalaman/shellcheck:stable",
} }

@ -9,7 +9,6 @@ load(
load("scripts/drone/vault.star", "pull_secret") load("scripts/drone/vault.star", "pull_secret")
failure_template = "Build {{build.number}} failed for commit: <https://github.com/{{repo.owner}}/{{repo.name}}/commit/{{build.commit}}|{{ truncate build.commit 8 }}>: {{build.link}}\nBranch: <https://github.com/{{ repo.owner }}/{{ repo.name }}/commits/{{ build.branch }}|{{ build.branch }}>\nAuthor: {{build.author}}" failure_template = "Build {{build.number}} failed for commit: <https://github.com/{{repo.owner}}/{{repo.name}}/commit/{{build.commit}}|{{ truncate build.commit 8 }}>: {{build.link}}\nBranch: <https://github.com/{{ repo.owner }}/{{ repo.name }}/commits/{{ build.branch }}|{{ build.branch }}>\nAuthor: {{build.author}}"
drone_change_template = "`.drone.yml` and `starlark` files have been changed on the OSS repo, by: {{build.author}}. \nBranch: <https://github.com/{{ repo.owner }}/{{ repo.name }}/commits/{{ build.branch }}|{{ build.branch }}>\nCommit hash: <https://github.com/{{repo.owner}}/{{repo.name}}/commit/{{build.commit}}|{{ truncate build.commit 8 }}>"
def pipeline( def pipeline(
name, name,
@ -31,7 +30,7 @@ def pipeline(
name: controls the pipeline name. name: controls the pipeline name.
trigger: a Drone trigger for the pipeline. trigger: a Drone trigger for the pipeline.
steps: the Drone steps for the pipeline. steps: the Drone steps for the pipeline.
services: auxilliary services used during the pipeline. services: auxiliary services used during the pipeline.
Defaults to []. Defaults to [].
platform: abstracts platform specific configuration primarily for different Drone behavior on Windows. platform: abstracts platform specific configuration primarily for different Drone behavior on Windows.
Defaults to 'linux'. Defaults to 'linux'.

@ -4,9 +4,14 @@ All the windows images needed to be in a different file than the other images, s
by trivy. Related issue: https://github.com/aquasecurity/trivy/issues/1392 by trivy. Related issue: https://github.com/aquasecurity/trivy/issues/1392
""" """
load(
"scripts/drone/variables.star",
"golang_version",
)
windows_images = { windows_images = {
"1809_image": "mcr.microsoft.com/windows:1809", "1809": "mcr.microsoft.com/windows:1809",
"wix_image": "grafana/ci-wix:0.1.1", "wix": "grafana/ci-wix:0.1.1",
"windows_server_core_image": "docker:windowsservercore-1809", "windows_server_core": "docker:windowsservercore-1809",
"windows_go_image": "grafana/grafana-ci-windows-test:0.1.0", "go": "golang:{}-windowsservercore-1809".format(golang_version),
} }

@ -0,0 +1,9 @@
"""
global variables
"""
# Pinned version of grabpl (the Grafana build-pipeline tool) downloaded by CI steps.
grabpl_version = "v3.0.42"

# Go toolchain version, interpolated into builder image tags and build flags.
golang_version = "1.20.10"

# nodejs_version should match what's in ".nvmrc", but without the v prefix.
nodejs_version = "18.12.0"

@ -14,6 +14,9 @@ azure_tenant = "azure_tenant"
rgm_gcp_key_base64 = "gcp_key_base64" rgm_gcp_key_base64 = "gcp_key_base64"
rgm_destination = "destination" rgm_destination = "destination"
rgm_storybook_destination = "rgm_storybook_destination"
rgm_cdn_destination = "rgm_cdn_destination"
rgm_downloads_destination = "rgm_downloads_destination"
rgm_github_token = "github_token" rgm_github_token = "github_token"
rgm_dagger_token = "dagger_token" rgm_dagger_token = "dagger_token"
@ -122,6 +125,21 @@ def secrets():
"infra/data/ci/grafana-release-eng/rgm", "infra/data/ci/grafana-release-eng/rgm",
"destination_prod", "destination_prod",
), ),
vault_secret(
rgm_storybook_destination,
"infra/data/ci/grafana-release-eng/rgm",
"storybook_destination",
),
vault_secret(
rgm_cdn_destination,
"infra/data/ci/grafana-release-eng/rgm",
"cdn_destination",
),
vault_secret(
rgm_downloads_destination,
"infra/data/ci/grafana-release-eng/rgm",
"downloads_destination",
),
vault_secret( vault_secret(
rgm_dagger_token, rgm_dagger_token,
"infra/data/ci/grafana-release-eng/rgm", "infra/data/ci/grafana-release-eng/rgm",
@ -143,21 +161,6 @@ def secrets():
"infra/data/ci/grafana-release-eng/grafana-delivery-bot", "infra/data/ci/grafana-release-eng/grafana-delivery-bot",
"app-private-key", "app-private-key",
), ),
vault_secret(
rgm_gcp_key_base64,
"infra/data/ci/grafana-release-eng/rgm",
"gcp_service_account_base64",
),
vault_secret(
rgm_destination,
"infra/data/ci/grafana-release-eng/rgm",
"destination",
),
vault_secret(
rgm_github_token,
"infra/data/ci/github/grafanabot",
"pat",
),
vault_secret( vault_secret(
"gcr_credentials", "gcr_credentials",
"secret/data/common/gcr", "secret/data/common/gcr",

@ -1,16 +0,0 @@
"""
This module returns the pipeline used for version branches.
"""
load(
"scripts/drone/events/release.star",
"oss_pipelines",
)
ver_mode = "release-branch"
trigger = {"ref": ["refs/heads/v[0-9]*"]}
def version_branch_pipelines():
return (
oss_pipelines(ver_mode = ver_mode, trigger = trigger)
)

@ -0,0 +1,80 @@
#!/bin/bash
# This script is used to validate the npm packages that are published to npmjs.org are in the correct format.
# It won't catch things like malformed JS or Types but it will assert that the package has
# the correct files and package.json properties.

set -uo pipefail

ARTIFACTS_DIR="./npm-artifacts"

for file in "$ARTIFACTS_DIR"/*.tgz; do
  # Without nullglob an empty directory leaves the literal pattern in $file;
  # fail loudly instead of reporting a confusing missing-file error later.
  if [ ! -f "$file" ]; then
    echo -e "❌ Failed: no npm package artifacts found in $ARTIFACTS_DIR.\n"
    exit 1
  fi

  echo "🔍 Checking NPM package: $file"

  # get filename then strip everything after package name.
  dir_name=$(basename "$file" .tgz | sed -E 's/@([a-zA-Z0-9-]+)-[0-9]+\.[0-9]+\.[0-9]+(-[a-zA-Z0-9-]+)?/\1/')
  mkdir -p "./npm-artifacts/$dir_name"

  # Check tar's own exit status: a corrupt archive must fail the build.
  if ! tar -xzf "$file" -C "./npm-artifacts/$dir_name" --strip-components=1; then
    echo -e "❌ Failed: could not extract package $dir_name.\n"
    exit 1
  fi

  # Make sure the tar wasn't empty. (Checking that the directory exists is not
  # enough — mkdir -p above always creates it — so check it has contents.)
  if [ -z "$(ls -A "./npm-artifacts/$dir_name")" ]; then
    echo -e "❌ Failed: Empty package $dir_name.\n"
    exit 1
  fi

  # Navigate inside the new extracted directory
  pushd "./npm-artifacts/$dir_name" || exit

  # Check for required files
  check_files=("package.json" "README.md" "CHANGELOG.md" "LICENSE_APACHE2")
  for check_file in "${check_files[@]}"; do
    if [ ! -f "$check_file" ]; then
      echo -e "❌ Failed: Missing required file $check_file in package $dir_name.\n"
      exit 1
    fi
  done

  # @grafana/toolkit structure is different to the other packages
  if [[ "$dir_name" == "grafana-toolkit" ]]; then
    if [ ! -d bin ] || [ ! -f bin/grafana-toolkit.js ]; then
      echo -e "❌ Failed: Missing 'bin' directory or required files in package $dir_name.\n"
      exit 1
    fi
    echo -e "✅ Passed: package checks for $file.\n"
    popd || exit
    continue
  fi

  # Assert commonjs builds
  if [ ! -d dist ] || [ ! -f dist/index.js ] || [ ! -f dist/index.d.ts ]; then
    echo -e "❌ Failed: Missing 'dist' directory or required commonjs files in package $dir_name.\n"
    exit 1
  fi

  if [ "$(jq -r '.main' package.json)" != "dist/index.js" ] || \
    [ "$(jq -r '.types' package.json)" != "dist/index.d.ts" ]; then
    echo -e "❌ Failed: Incorrect package.json properties in package $dir_name.\n"
    exit 1
  fi

  # Assert esm builds
  esm_packages=("grafana-data" "grafana-ui" "grafana-runtime" "grafana-e2e-selectors" "grafana-schema")
  for esm_package in "${esm_packages[@]}"; do
    if [[ "$dir_name" == "$esm_package" ]]; then
      if [ ! -d dist/esm ] || [ ! -f dist/esm/index.js ]; then
        echo -e "❌ Failed: Missing 'dist/esm' directory or required esm files in package $dir_name.\n"
        exit 1
      fi
      if [ "$(jq -r '.module' package.json)" != "dist/esm/index.js" ]; then
        echo -e "❌ Failed: Incorrect package.json properties in package $dir_name.\n"
        exit 1
      fi
    fi
  done

  echo -e "✅ Passed: package checks for $file.\n"
  popd || exit
done

echo "🚀 All NPM package checks passed! 🚀"

# ':?' aborts if ARTIFACTS_DIR is ever empty, guarding against 'rm -rf /*/'.
rm -rf "${ARTIFACTS_DIR:?}/"*/

exit 0
Loading…
Cancel
Save