[v9.4.x] CI: Update CI/CD tooling and pipelines from main (#76881)

* CI: Update CI/CD tooling and pipelines from main (#76814)

* CI: Update CI/CD tooling and pipelines from main

* Update Makefile

* Comment out validate_openapi_spec_step

* Update broken frontend tests

* Fix validate-npm-packages regex to work without suffix

* Fix cypress image version

(cherry picked from commit 03ecb1db39)

* Fix path for ./pkg/kindsys/report.go in the Makefile

* Re-add ./pkg/cmd/grafana-cli/runner to make gen-go
pull/77015/head
Guilherme Caulada committed by GitHub
parent 6365037e69
commit 2b54a169b2
Changed files (lines changed in parentheses):
  1. .drone.star (20)
  2. .drone.yml (2380)
  3. Makefile (126)
  4. pkg/build/cmd.go (24)
  5. pkg/build/cmd/buildbackend.go (3)
  6. pkg/build/cmd/builddocker.go (3)
  7. pkg/build/cmd/buildfrontend.go (3)
  8. pkg/build/cmd/buildinternalplugins.go (3)
  9. pkg/build/cmd/e2etests.go (3)
  10. pkg/build/cmd/enterprisecheck.go (3)
  11. pkg/build/cmd/exportversion.go (3)
  12. pkg/build/cmd/fetchimages.go (3)
  13. pkg/build/cmd/grafanacom.go (3)
  14. pkg/build/cmd/main.go (3)
  15. pkg/build/cmd/npm.go (3)
  16. pkg/build/cmd/package.go (3)
  17. pkg/build/cmd/publishaws.go (3)
  18. pkg/build/cmd/publishgithub.go (3)
  19. pkg/build/cmd/publishimages_enterprise2.go (3)
  20. pkg/build/cmd/storestorybook.go (3)
  21. pkg/build/cmd/uploadcdn.go (3)
  22. pkg/build/cmd/uploadpackages.go (3)
  23. pkg/build/cmd/uploadpackages_test.go (3)
  24. pkg/build/cmd/verifydrone.go (8)
  25. pkg/build/config/genmetadata.go (3)
  26. pkg/build/config/genmetadata_test.go (6)
  27. pkg/build/config/revision.go (26)
  28. pkg/build/config/version.go (2)
  29. pkg/build/config/versions.go (37)
  30. pkg/build/docker/build.go (2)
  31. pkg/build/droneutil/event_test.go (3)
  32. pkg/build/env/fallback_test.go (27)
  33. pkg/build/env/lookup_test.go (4)
  34. pkg/build/frontend/config_test.go (29)
  35. pkg/build/fsutil/copy_test.go (3)
  36. pkg/build/fsutil/exists_test.go (3)
  37. pkg/build/gcloud/storage/gsutil.go (27)
  38. pkg/build/git.go (13)
  39. pkg/build/git/git.go (3)
  40. pkg/build/git/git_checks_test.go (3)
  41. pkg/build/git/git_issues_test.go (3)
  42. pkg/build/git/git_test.go (3)
  43. pkg/build/grafana/build.go (10)
  44. pkg/build/lerna/lerna.go (2)
  45. pkg/build/metrics/publish.go (3)
  46. pkg/build/npm/npm.go (6)
  47. pkg/build/packaging/grafana.go (10)
  48. pkg/build/packaging/grafana_test.go (3)
  49. pkg/build/version.go (3)
  50. scripts/drone/TAGS (628)
  51. scripts/drone/events/cron.star (36)
  52. scripts/drone/events/main.star (63)
  53. scripts/drone/events/pr.star (46)
  54. scripts/drone/events/release.star (239)
  55. scripts/drone/pipelines/benchmarks.star (80)
  56. scripts/drone/pipelines/build.star (61)
  57. scripts/drone/pipelines/ci_images.star (24)
  58. scripts/drone/pipelines/docs.star (10)
  59. scripts/drone/pipelines/integration_tests.star (39)
  60. scripts/drone/pipelines/lint_backend.star (4)
  61. scripts/drone/pipelines/lint_frontend.star (8)
  62. scripts/drone/pipelines/shellcheck.star (16)
  63. scripts/drone/pipelines/test_backend.star (17)
  64. scripts/drone/pipelines/test_frontend.star (10)
  65. scripts/drone/pipelines/whats_new_checker.star (10)
  66. scripts/drone/pipelines/windows.star (22)
  67. scripts/drone/rgm.star (290)
  68. scripts/drone/services/services.star (60)
  69. scripts/drone/steps/lib.star (796)
  70. scripts/drone/steps/lib_windows.star (187)
  71. scripts/drone/steps/rgm.star (61)
  72. scripts/drone/utils/images.star (49)
  73. scripts/drone/utils/utils.star (3)
  74. scripts/drone/utils/windows_images.star (13)
  75. scripts/drone/variables.star (9)
  76. scripts/drone/vault.star (33)
  77. scripts/drone/version.star (16)
  78. scripts/validate-npm-packages.sh (80)

@ -7,8 +7,9 @@
This module returns a Drone configuration including pipelines and secrets.
"""
load("scripts/drone/events/pr.star", "pr_pipelines")
load("scripts/drone/events/cron.star", "cronjobs")
load("scripts/drone/events/main.star", "main_pipelines")
load("scripts/drone/events/pr.star", "pr_pipelines")
load(
"scripts/drone/events/release.star",
"integration_test_pipelines",
@ -17,24 +18,22 @@ load(
"publish_packages_pipeline",
)
load(
"scripts/drone/rgm.star",
"rgm",
"scripts/drone/pipelines/ci_images.star",
"publish_ci_build_container_image_pipeline",
"publish_ci_windows_test_image_pipeline",
)
load(
"scripts/drone/pipelines/publish_images.star",
"publish_image_pipelines_public",
)
load(
"scripts/drone/pipelines/ci_images.star",
"publish_ci_build_container_image_pipeline",
"publish_ci_windows_test_image_pipeline",
)
load(
"scripts/drone/pipelines/windows.star",
"windows_test_backend",
)
load("scripts/drone/version.star", "version_branch_pipelines")
load("scripts/drone/events/cron.star", "cronjobs")
load(
"scripts/drone/rgm.star",
"rgm",
)
load("scripts/drone/vault.star", "secrets")
def main(_ctx):
@ -50,7 +49,6 @@ def main(_ctx):
"event": ["promote"],
"target": ["test-windows"],
}, "oss", "testing")] +
version_branch_pipelines() +
integration_test_pipelines() +
publish_ci_windows_test_image_pipeline() +
publish_ci_build_container_image_pipeline() +

.drone.yml: file diff suppressed because it is too large.

@ -13,9 +13,12 @@ GO = go
GO_FILES ?= ./pkg/...
SH_FILES ?= $(shell find ./scripts -name *.sh)
GO_BUILD_FLAGS += $(if $(GO_BUILD_DEV),-dev)
GO_BUILD_FLAGS += $(if $(GO_BUILD_TAGS),-build-tags=$(GO_BUILD_TAGS))
targets := $(shell echo '$(sources)' | tr "," " ")
GO_INTEGRATION_TESTS := $(shell find ./pkg -type f -name '*_test.go' -exec grep -l '^func TestIntegration' '{}' '+' | grep -o '\(.*\)/' | sort -u)
all: deps build
##@ Dependencies
@ -33,35 +36,68 @@ node_modules: package.json yarn.lock ## Install node modules.
##@ Swagger
SPEC_TARGET = public/api-spec.json
MERGED_SPEC_TARGET := public/api-merged.json
ENTERPRISE_SPEC_TARGET = public/api-enterprise-spec.json
MERGED_SPEC_TARGET = public/api-merged.json
NGALERT_SPEC_TARGET = pkg/services/ngalert/api/tooling/api.json
$(NGALERT_SPEC_TARGET):
+$(MAKE) -C pkg/services/ngalert/api/tooling api.json
$(MERGED_SPEC_TARGET): $(SPEC_TARGET) $(NGALERT_SPEC_TARGET) $(SWAGGER) ## Merge generated and ngalert API specs
$(MERGED_SPEC_TARGET): swagger-oss-gen swagger-enterprise-gen $(NGALERT_SPEC_TARGET) $(SWAGGER) ## Merge generated and ngalert API specs
# known conflicts DsPermissionType, AddApiKeyCommand, Json, Duration (identical models referenced by both specs)
$(SWAGGER) mixin $(SPEC_TARGET) $(NGALERT_SPEC_TARGET) --ignore-conflicts -o $(MERGED_SPEC_TARGET)
$(SWAGGER) mixin $(SPEC_TARGET) $(ENTERPRISE_SPEC_TARGET) $(NGALERT_SPEC_TARGET) --ignore-conflicts -o $(MERGED_SPEC_TARGET)
$(SPEC_TARGET): $(SWAGGER) ## Generate API Swagger specification
swagger-oss-gen: $(SWAGGER) ## Generate API Swagger specification
@echo "re-generating swagger for OSS"
rm -f $(SPEC_TARGET)
SWAGGER_GENERATE_EXTENSION=false $(SWAGGER) generate spec -m -w pkg/server -o $(SPEC_TARGET) \
-x "github.com/grafana/grafana/pkg/services/ngalert/api/tooling/definitions" \
-x "github.com/prometheus/alertmanager" \
-i pkg/api/swagger_tags.json \
--exclude-tag=alpha
--exclude-tag=alpha \
--exclude-tag=enterprise
# this file only exists if enterprise is enabled
ENTERPRISE_EXT_FILE = pkg/extensions/ext.go
ifeq ("$(wildcard $(ENTERPRISE_EXT_FILE))","") ## if enterprise is not enabled
swagger-enterprise-gen:
@echo "skipping re-generating swagger for enterprise: not enabled"
else
swagger-enterprise-gen: $(SWAGGER) ## Generate API Swagger specification
@echo "re-generating swagger for enterprise"
rm -f $(ENTERPRISE_SPEC_TARGET)
SWAGGER_GENERATE_EXTENSION=false $(SWAGGER) generate spec -m -w pkg/server -o $(ENTERPRISE_SPEC_TARGET) \
-x "github.com/grafana/grafana/pkg/services/ngalert/api/tooling/definitions" \
-x "github.com/prometheus/alertmanager" \
-i pkg/api/swagger_tags.json \
--exclude-tag=alpha \
--include-tag=enterprise
endif
swagger-api-spec: gen-go $(SPEC_TARGET) $(MERGED_SPEC_TARGET) validate-api-spec
swagger-gen: gen-go $(MERGED_SPEC_TARGET) swagger-validate
validate-api-spec: $(MERGED_SPEC_TARGET) $(SWAGGER) ## Validate API spec
swagger-validate: $(MERGED_SPEC_TARGET) $(SWAGGER) ## Validate API spec
$(SWAGGER) validate $(<)
clean-api-spec:
rm $(SPEC_TARGET) $(MERGED_SPEC_TARGET) $(OAPI_SPEC_TARGET)
swagger-clean:
rm -f $(SPEC_TARGET) $(MERGED_SPEC_TARGET) $(OAPI_SPEC_TARGET)
.PHONY: cleanup-old-git-hooks
cleanup-old-git-hooks:
./scripts/cleanup-husky.sh
.PHONY: lefthook-install
lefthook-install: cleanup-old-git-hooks $(LEFTHOOK) # install lefthook for pre-commit hooks
$(LEFTHOOK) install -f
.PHONY: lefthook-uninstall
lefthook-uninstall: $(LEFTHOOK)
$(LEFTHOOK) uninstall
##@ OpenAPI 3
OAPI_SPEC_TARGET = public/openapi3.json
openapi3-gen: swagger-api-spec ## Generates OpenApi 3 specs from the Swagger 2 already generated
openapi3-gen: swagger-gen ## Generates OpenApi 3 specs from the Swagger 2 already generated
$(GO) run scripts/openapi3/openapi3conv.go $(MERGED_SPEC_TARGET) $(OAPI_SPEC_TARGET)
##@ Building
@ -72,7 +108,7 @@ gen-cue: ## Do all CUE/Thema code generation
go generate ./public/app/plugins/gen.go
go generate ./pkg/kindsys/report.go
gen-go: $(WIRE) gen-cue
gen-go: $(WIRE)
@echo "generate go files"
$(WIRE) gen -tags $(WIRE_TAGS) ./pkg/server ./pkg/cmd/grafana-cli/runner
@ -84,7 +120,7 @@ fix-cue: $(CUE)
gen-jsonnet:
go generate ./devenv/jsonnet
build-go: $(MERGED_SPEC_TARGET) gen-go ## Build all Go binaries.
build-go: gen-go ## Build all Go binaries.
@echo "build go files"
$(GO) run build.go $(GO_BUILD_FLAGS) build
@ -126,19 +162,39 @@ test-go-unit: ## Run unit tests for backend with flags.
.PHONY: test-go-integration
test-go-integration: ## Run integration tests for backend with flags.
@echo "test backend integration tests"
$(GO) test -run Integration -covermode=atomic -timeout=30m ./pkg/...
$(GO) test -count=1 -run "^TestIntegration" -covermode=atomic -timeout=5m $(GO_INTEGRATION_TESTS)
.PHONY: test-go-integration-alertmanager
test-go-integration-alertmanager: ## Run integration tests for the remote alertmanager (config taken from the mimir_backend block).
@echo "test remote alertmanager integration tests"
$(GO) clean -testcache
AM_URL=http://localhost:8080 AM_TENANT_ID=test AM_PASSWORD=test \
$(GO) test -count=1 -run "^TestIntegrationRemoteAlertmanager" -covermode=atomic -timeout=5m ./pkg/services/ngalert/notifier/...
.PHONY: test-go-integration-postgres
test-go-integration-postgres: devenv-postgres ## Run integration tests for postgres backend with flags.
@echo "test backend integration postgres tests"
$(GO) clean -testcache
$(GO) list './pkg/...' | xargs -I {} sh -c 'GRAFANA_TEST_DB=postgres go test -run Integration -covermode=atomic -timeout=2m {}'
GRAFANA_TEST_DB=postgres \
$(GO) test -p=1 -count=1 -run "^TestIntegration" -covermode=atomic -timeout=10m $(GO_INTEGRATION_TESTS)
.PHONY: test-go-integration-mysql
test-go-integration-mysql: devenv-mysql ## Run integration tests for mysql backend with flags.
@echo "test backend integration mysql tests"
GRAFANA_TEST_DB=mysql \
$(GO) test -p=1 -count=1 -run "^TestIntegration" -covermode=atomic -timeout=10m $(GO_INTEGRATION_TESTS)
.PHONY: test-go-integration-redis
test-go-integration-redis: ## Run integration tests for redis cache.
@echo "test backend integration redis tests"
$(GO) clean -testcache
$(GO) list './pkg/...' | xargs -I {} sh -c 'GRAFANA_TEST_DB=mysql go test -run Integration -covermode=atomic -timeout=2m {}'
REDIS_URL=localhost:6379 $(GO) test -run IntegrationRedis -covermode=atomic -timeout=2m $(GO_INTEGRATION_TESTS)
.PHONY: test-go-integration-memcached
test-go-integration-memcached: ## Run integration tests for memcached cache.
@echo "test backend integration memcached tests"
$(GO) clean -testcache
MEMCACHED_HOSTS=localhost:11211 $(GO) test -run IntegrationMemcached -covermode=atomic -timeout=2m $(GO_INTEGRATION_TESTS)
test-js: ## Run tests for frontend.
@echo "test frontend"
@ -162,19 +218,36 @@ shellcheck: $(SH_FILES) ## Run checks for shell scripts.
##@ Docker
TAG_SUFFIX=$(if $(WIRE_TAGS)!=oss,-$(WIRE_TAGS))
PLATFORM=linux/amd64
build-docker-full: ## Build Docker image for development.
@echo "build docker container"
DOCKER_BUILDKIT=1 \
docker build \
--tag grafana/grafana:dev .
tar -ch . | \
docker buildx build - \
--platform $(PLATFORM) \
--build-arg BINGO=false \
--build-arg GO_BUILD_TAGS=$(GO_BUILD_TAGS) \
--build-arg WIRE_TAGS=$(WIRE_TAGS) \
--build-arg COMMIT_SHA=$$(git rev-parse HEAD) \
--build-arg BUILD_BRANCH=$$(git rev-parse --abbrev-ref HEAD) \
--tag grafana/grafana$(TAG_SUFFIX):dev \
$(DOCKER_BUILD_ARGS)
build-docker-full-ubuntu: ## Build Docker image based on Ubuntu for development.
@echo "build docker container"
DOCKER_BUILDKIT=1 \
docker build \
--build-arg BASE_IMAGE=ubuntu:20.04 \
tar -ch . | \
docker buildx build - \
--platform $(PLATFORM) \
--build-arg BINGO=false \
--build-arg GO_BUILD_TAGS=$(GO_BUILD_TAGS) \
--build-arg WIRE_TAGS=$(WIRE_TAGS) \
--build-arg COMMIT_SHA=$$(git rev-parse HEAD) \
--build-arg BUILD_BRANCH=$$(git rev-parse --abbrev-ref HEAD) \
--build-arg BASE_IMAGE=ubuntu:22.04 \
--build-arg GO_IMAGE=golang:1.20.10 \
--tag grafana/grafana:dev-ubuntu .
--tag grafana/grafana$(TAG_SUFFIX):dev-ubuntu \
$(DOCKER_BUILD_ARGS)
##@ Services
@ -185,8 +258,6 @@ devenv:
@printf 'You have to define sources for this command \nexample: make devenv sources=postgres,openldap\n'
else
devenv: devenv-down ## Start optional services, e.g. postgres, prometheus, and elasticsearch.
$(eval targets := $(shell echo '$(sources)' | tr "," " "))
@cd devenv; \
./create_docker_compose.sh $(targets) || \
(rm -rf {docker-compose.yaml,conf.tmp,.env}; exit 1)
@ -219,6 +290,9 @@ devenv-mysql:
protobuf: ## Compile protobuf definitions
bash scripts/protobuf-check.sh
bash pkg/plugins/backendplugin/pluginextensionv2/generate.sh
bash pkg/plugins/backendplugin/secretsmanagerplugin/generate.sh
bash pkg/services/store/entity/generate.sh
bash pkg/infra/grn/generate.sh
clean: ## Clean up intermediate build artifacts.
@echo "cleaning"
@ -244,7 +318,7 @@ scripts/drone/TAGS: $(shell find scripts/drone -name '*.star')
etags --lang none --regex="/def \(\w+\)[^:]+:/\1/" --regex="/\s*\(\w+\) =/\1/" $^ -o $@
format-drone:
buildifier -r scripts/drone
buildifier --lint=fix -r scripts/drone
help: ## Display this help.
@awk 'BEGIN {FS = ":.*##"; printf "\nUsage:\n make \033[36m<target>\033[0m\n"} /^[a-zA-Z_-]+:.*?##/ { printf " \033[36m%-15s\033[0m %s\n", $$1, $$2 } /^##@/ { printf "\n\033[1m%s\033[0m\n", substr($$0, 5) } ' $(MAKEFILE_LIST)
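Editor's aside, not part of the commit: the swagger-enterprise-gen target added above is gated on whether pkg/extensions/ext.go exists ("this file only exists if enterprise is enabled"). A minimal Go sketch of that same existence check, assuming it runs from the repository root:

package main

import (
	"fmt"
	"os"
)

func main() {
	// Mirrors the Makefile's $(wildcard pkg/extensions/ext.go) guard: when the
	// file is absent, enterprise swagger generation is skipped entirely.
	if _, err := os.Stat("pkg/extensions/ext.go"); err != nil {
		fmt.Println("skipping re-generating swagger for enterprise: not enabled")
		return
	}
	fmt.Println("re-generating swagger for enterprise")
}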

@ -217,12 +217,32 @@ func ldflags(opts BuildOpts) (string, error) {
return "", err
}
commitSha := getGitSha()
if v := os.Getenv("COMMIT_SHA"); v != "" {
commitSha = v
}
var enterpriseCommitSha string
if opts.enterprise {
enterpriseCommitSha = getGitEnterpriseSha()
if v := os.Getenv("ENTERPRISE_COMMIT_SHA"); v != "" {
enterpriseCommitSha = v
}
}
buildBranch := getGitBranch()
if v := os.Getenv("BUILD_BRANCH"); v != "" {
buildBranch = v
}
var b bytes.Buffer
b.WriteString("-w")
b.WriteString(fmt.Sprintf(" -X main.version=%s", opts.version))
b.WriteString(fmt.Sprintf(" -X main.commit=%s", getGitSha()))
b.WriteString(fmt.Sprintf(" -X main.commit=%s", commitSha))
if enterpriseCommitSha != "" {
b.WriteString(fmt.Sprintf(" -X main.enterpriseCommit=%s", enterpriseCommitSha))
}
b.WriteString(fmt.Sprintf(" -X main.buildstamp=%d", buildStamp))
b.WriteString(fmt.Sprintf(" -X main.buildBranch=%s", getGitBranch()))
b.WriteString(fmt.Sprintf(" -X main.buildBranch=%s", buildBranch))
if v := os.Getenv("LDFLAGS"); v != "" {
b.WriteString(fmt.Sprintf(" -extldflags \"%s\"", v))
}
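Editor's aside, not part of the commit: the hunk above lets COMMIT_SHA, ENTERPRISE_COMMIT_SHA and BUILD_BRANCH override the values derived from git. A minimal sketch of that precedence and of the resulting -ldflags string; the version number is a placeholder, not taken from the diff:

package main

import (
	"fmt"
	"os"
	"os/exec"
	"strings"
)

// gitOutput runs git with the given arguments and returns its trimmed output,
// or an empty string if git fails.
func gitOutput(args ...string) string {
	out, err := exec.Command("git", args...).Output()
	if err != nil {
		return ""
	}
	return strings.TrimSpace(string(out))
}

// envOr returns the environment variable when it is set, otherwise the fallback.
func envOr(key, fallback string) string {
	if v := os.Getenv(key); v != "" {
		return v
	}
	return fallback
}

func main() {
	commit := envOr("COMMIT_SHA", gitOutput("rev-parse", "--short", "HEAD"))
	branch := envOr("BUILD_BRANCH", gitOutput("rev-parse", "--abbrev-ref", "HEAD"))
	fmt.Printf("-w -X main.version=9.4.0 -X main.commit=%s -X main.buildBranch=%s\n", commit, branch)
}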

@ -4,12 +4,13 @@ import (
"fmt"
"log"
"github.com/urfave/cli/v2"
"github.com/grafana/grafana/pkg/build/compilers"
"github.com/grafana/grafana/pkg/build/config"
"github.com/grafana/grafana/pkg/build/errutil"
"github.com/grafana/grafana/pkg/build/grafana"
"github.com/grafana/grafana/pkg/build/syncutil"
"github.com/urfave/cli/v2"
)
func BuildBackend(ctx *cli.Context) error {

@ -3,10 +3,11 @@ package main
import (
"log"
"github.com/urfave/cli/v2"
"github.com/grafana/grafana/pkg/build/config"
"github.com/grafana/grafana/pkg/build/docker"
"github.com/grafana/grafana/pkg/build/gcloud"
"github.com/urfave/cli/v2"
)
func BuildDocker(c *cli.Context) error {

@ -3,11 +3,12 @@ package main
import (
"log"
"github.com/urfave/cli/v2"
"github.com/grafana/grafana/pkg/build/config"
"github.com/grafana/grafana/pkg/build/errutil"
"github.com/grafana/grafana/pkg/build/frontend"
"github.com/grafana/grafana/pkg/build/syncutil"
"github.com/urfave/cli/v2"
)
func BuildFrontend(c *cli.Context) error {

@ -4,11 +4,12 @@ import (
"context"
"log"
"github.com/urfave/cli/v2"
"github.com/grafana/grafana/pkg/build/config"
"github.com/grafana/grafana/pkg/build/errutil"
"github.com/grafana/grafana/pkg/build/plugins"
"github.com/grafana/grafana/pkg/build/syncutil"
"github.com/urfave/cli/v2"
)
func BuildInternalPlugins(c *cli.Context) error {

@ -6,8 +6,9 @@ import (
"os"
"os/exec"
"github.com/grafana/grafana/pkg/build/e2eutil"
"github.com/urfave/cli/v2"
"github.com/grafana/grafana/pkg/build/e2eutil"
)
func EndToEndTests(c *cli.Context) error {

@ -6,9 +6,10 @@ import (
"os"
"strconv"
"github.com/urfave/cli/v2"
"github.com/grafana/grafana/pkg/build/env"
"github.com/grafana/grafana/pkg/build/git"
"github.com/urfave/cli/v2"
)
// checkOpts are options used to create a new GitHub check for the enterprise downstream test.

@ -4,8 +4,9 @@ import (
"os"
"path/filepath"
"github.com/grafana/grafana/pkg/build/config"
"github.com/urfave/cli/v2"
"github.com/grafana/grafana/pkg/build/config"
)
func ExportVersion(c *cli.Context) error {

@ -6,10 +6,11 @@ import (
"os/exec"
"strings"
"github.com/urfave/cli/v2"
"github.com/grafana/grafana/pkg/build/config"
"github.com/grafana/grafana/pkg/build/docker"
"github.com/grafana/grafana/pkg/build/gcloud"
"github.com/urfave/cli/v2"
)
const (

@ -9,7 +9,6 @@ import (
"net/http"
"net/url"
"os"
"path"
"path/filepath"
"strings"
@ -233,7 +232,7 @@ func getSHA256(u string) ([]byte, error) {
return sha256, nil
}
func postRequest(cfg packaging.PublishConfig, pth string, obj interface{}, descr string) error {
func postRequest(cfg packaging.PublishConfig, pth string, obj any, descr string) error {
var sfx string
switch cfg.Edition {
case config.EditionOSS:

@ -5,8 +5,9 @@ import (
"os"
"strings"
"github.com/grafana/grafana/pkg/build/docker"
"github.com/urfave/cli/v2"
"github.com/grafana/grafana/pkg/build/docker"
)
var additionalCommands []*cli.Command = make([]*cli.Command, 0, 5)

@ -6,8 +6,9 @@ import (
"os/exec"
"strings"
"github.com/grafana/grafana/pkg/build/npm"
"github.com/urfave/cli/v2"
"github.com/grafana/grafana/pkg/build/npm"
)
func NpmRetrieveAction(c *cli.Context) error {

@ -5,11 +5,12 @@ import (
"log"
"strings"
"github.com/urfave/cli/v2"
"github.com/grafana/grafana/pkg/build/config"
"github.com/grafana/grafana/pkg/build/gpg"
"github.com/grafana/grafana/pkg/build/packaging"
"github.com/grafana/grafana/pkg/build/syncutil"
"github.com/urfave/cli/v2"
)
func Package(c *cli.Context) error {

@ -17,8 +17,9 @@ import (
"github.com/aws/aws-sdk-go/service/marketplacecatalog"
"github.com/docker/docker/api/types"
"github.com/docker/docker/client"
"github.com/grafana/grafana/pkg/build/config"
"github.com/urfave/cli/v2"
"github.com/grafana/grafana/pkg/build/config"
)
const (

@ -9,9 +9,10 @@ import (
"strings"
"github.com/google/go-github/github"
"github.com/grafana/grafana/pkg/build/config"
"github.com/urfave/cli/v2"
"golang.org/x/oauth2"
"github.com/grafana/grafana/pkg/build/config"
)
type githubRepositoryService interface {

@ -6,10 +6,11 @@ import (
"os"
"os/exec"
"github.com/urfave/cli/v2"
"github.com/grafana/grafana/pkg/build/config"
"github.com/grafana/grafana/pkg/build/docker"
"github.com/grafana/grafana/pkg/build/gcloud"
"github.com/urfave/cli/v2"
)
func Enterprise2(c *cli.Context) error {

@ -4,9 +4,10 @@ import (
"log"
"path/filepath"
"github.com/urfave/cli/v2"
"github.com/grafana/grafana/pkg/build/config"
"github.com/grafana/grafana/pkg/build/gcloud/storage"
"github.com/urfave/cli/v2"
)
// StoreStorybook implements the sub-command "store-storybook".

@ -6,9 +6,10 @@ import (
"os"
"path/filepath"
"github.com/urfave/cli/v2"
"github.com/grafana/grafana/pkg/build/config"
"github.com/grafana/grafana/pkg/build/gcloud/storage"
"github.com/urfave/cli/v2"
)
// UploadCDN implements the sub-command "upload-cdn".

@ -9,11 +9,12 @@ import (
"path/filepath"
"strings"
"github.com/urfave/cli/v2"
"github.com/grafana/grafana/pkg/build/config"
"github.com/grafana/grafana/pkg/build/droneutil"
"github.com/grafana/grafana/pkg/build/gcloud"
"github.com/grafana/grafana/pkg/build/packaging"
"github.com/urfave/cli/v2"
)
const releaseFolder = "release"

@ -5,9 +5,10 @@ import (
"fmt"
"testing"
"github.com/grafana/grafana/pkg/build/config"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
"github.com/grafana/grafana/pkg/build/config"
)
func Test_getVersionFolder(t *testing.T) {

@ -10,9 +10,7 @@ import (
"github.com/drone/drone-cli/drone/lint"
"github.com/drone/drone-cli/drone/starlark"
"github.com/google/go-cmp/cmp"
cliv1 "github.com/urfave/cli"
"github.com/urfave/cli/v2"
"gopkg.in/yaml.v3"
@ -70,7 +68,7 @@ func VerifyDrone(c *cli.Context) error {
return nil
}
func readConfig(fpath string) ([]map[string]interface{}, error) {
func readConfig(fpath string) ([]map[string]any, error) {
//nolint:gosec
f, err := os.Open(fpath)
if err != nil {
@ -84,9 +82,9 @@ func readConfig(fpath string) ([]map[string]interface{}, error) {
// The YAML stream may contain multiple pipeline configurations, read them all
dec := yaml.NewDecoder(f)
var c []map[string]interface{}
var c []map[string]any
for {
var m map[string]interface{}
var m map[string]any
if err := dec.Decode(&m); err != nil {
if errors.Is(err, io.EOF) {
break

@ -5,8 +5,9 @@ import (
"os"
"strings"
"github.com/grafana/grafana/pkg/build/droneutil"
"github.com/urfave/cli/v2"
"github.com/grafana/grafana/pkg/build/droneutil"
)
func GenerateMetadata(c *cli.Context) (Metadata, error) {

@ -72,10 +72,8 @@ func setUpEnv(t *testing.T, envMap map[string]string) {
t.Helper()
os.Clearenv()
err := os.Setenv("DRONE_COMMIT", "abcd12345")
require.NoError(t, err)
t.Setenv("DRONE_COMMIT", "abcd12345")
for k, v := range envMap {
err := os.Setenv(k, v)
require.NoError(t, err)
t.Setenv(k, v)
}
}

@ -3,6 +3,7 @@ package config
import (
"context"
"fmt"
"log"
"strconv"
"time"
@ -10,9 +11,10 @@ import (
)
type Revision struct {
Timestamp int64
SHA256 string
Branch string
Timestamp int64
SHA256 string
EnterpriseCommit string
Branch string
}
func GrafanaTimestamp(ctx context.Context, dir string) (int64, error) {
@ -42,14 +44,26 @@ func GrafanaRevision(ctx context.Context, grafanaDir string) (Revision, error) {
return Revision{}, err
}
enterpriseCommit, err := executil.OutputAt(ctx, grafanaDir, "git", "-C", "../grafana-enterprise", "rev-parse", "--short", "HEAD")
if err != nil {
enterpriseCommit, err = executil.OutputAt(ctx, grafanaDir, "git", "-C", "..", "rev-parse", "--short", "HEAD")
if err != nil {
enterpriseCommit, err = executil.OutputAt(ctx, grafanaDir, "git", "-C", "/tmp/grafana-enterprise", "rev-parse", "--short", "HEAD")
if err != nil {
log.Println("Could not get enterprise commit. Error:", err)
}
}
}
branch, err := executil.OutputAt(ctx, grafanaDir, "git", "rev-parse", "--abbrev-ref", "HEAD")
if err != nil {
return Revision{}, err
}
return Revision{
SHA256: sha,
Branch: branch,
Timestamp: stamp,
SHA256: sha,
EnterpriseCommit: enterpriseCommit,
Branch: branch,
Timestamp: stamp,
}, nil
}

@ -94,7 +94,7 @@ func GetPackageJSONVersion(grafanaDir string) (string, error) {
if err != nil {
return "", fmt.Errorf("failed to read %q: %w", pkgJSONPath, err)
}
pkgObj := map[string]interface{}{}
pkgObj := map[string]any{}
if err := json.Unmarshal(pkgJSONB, &pkgObj); err != nil {
return "", fmt.Errorf("failed decoding %q: %w", pkgJSONPath, err)
}

@ -1,7 +1,5 @@
package config
const PublicBucket = "grafana-downloads"
var Versions = VersionMap{
PullRequestMode: {
Variants: []Variant{
@ -9,8 +7,9 @@ var Versions = VersionMap{
VariantLinuxAmd64Musl,
VariantDarwinAmd64,
VariantWindowsAmd64,
VariantArm64,
VariantArm64Musl,
// https://github.com/golang/go/issues/58425 disabling arm builds until go issue is resolved
// VariantArm64,
// VariantArm64Musl,
},
PluginSignature: PluginSignature{
Sign: false,
@ -29,9 +28,10 @@ var Versions = VersionMap{
},
MainMode: {
Variants: []Variant{
VariantArmV6,
VariantArmV7,
VariantArmV7Musl,
// https://github.com/golang/go/issues/58425 disabling arm builds until go issue is resolved
// VariantArmV6,
// VariantArmV7,
// VariantArmV7Musl,
VariantArm64,
VariantArm64Musl,
VariantDarwinAmd64,
@ -48,7 +48,8 @@ var Versions = VersionMap{
Architectures: []Architecture{
ArchAMD64,
ArchARM64,
ArchARMv7, // GOARCH=ARM is used for both armv6 and armv7. They are differentiated by the GOARM variable.
// https://github.com/golang/go/issues/58425 disabling arm builds until go issue is resolved
// ArchARMv7, // GOARCH=ARM is used for both armv6 and armv7. They are differentiated by the GOARM variable.
},
Distribution: []Distribution{
Alpine,
@ -64,9 +65,10 @@ var Versions = VersionMap{
},
DownstreamMode: {
Variants: []Variant{
VariantArmV6,
VariantArmV7,
VariantArmV7Musl,
// https://github.com/golang/go/issues/58425 disabling arm builds until go issue is resolved
// VariantArmV6,
//VariantArmV7,
// VariantArmV7Musl,
VariantArm64,
VariantArm64Musl,
VariantDarwinAmd64,
@ -83,7 +85,8 @@ var Versions = VersionMap{
Architectures: []Architecture{
ArchAMD64,
ArchARM64,
ArchARMv7, // GOARCH=ARM is used for both armv6 and armv7. They are differentiated by the GOARM variable.
// https://github.com/golang/go/issues/58425 disabling arm builds until go issue is resolved
// ArchARMv7, // GOARCH=ARM is used for both armv6 and armv7. They are differentiated by the GOARM variable.
},
Distribution: []Distribution{
Alpine,
@ -171,9 +174,10 @@ var Versions = VersionMap{
},
Enterprise2Mode: {
Variants: []Variant{
VariantArmV6,
VariantArmV7,
VariantArmV7Musl,
// https://github.com/golang/go/issues/58425 disabling arm builds until go issue is resolved
// VariantArmV6,
// VariantArmV7,
// VariantArmV7Musl,
VariantArm64,
VariantArm64Musl,
VariantDarwinAmd64,
@ -190,7 +194,8 @@ var Versions = VersionMap{
Architectures: []Architecture{
ArchAMD64,
ArchARM64,
ArchARMv7,
// https://github.com/golang/go/issues/58425 disabling arm builds until go issue is resolved
// ArchARMv7,
},
Distribution: []Distribution{
Alpine,
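Editor's aside, not part of the commit: the comment carried through the versions.go hunks above notes that GOARCH=arm covers both armv6 and armv7, with GOARM selecting the sub-architecture. A rough sketch of what re-enabling an armv7 build would involve once the linked Go issue is resolved; the output name and package path are hypothetical:

package main

import (
	"os"
	"os/exec"
)

func main() {
	// GOARCH=arm is shared by armv6 and armv7; GOARM picks which one is targeted.
	cmd := exec.Command("go", "build", "-o", "grafana-linux-armv7", "./pkg/cmd/grafana-server")
	cmd.Env = append(os.Environ(), "GOOS=linux", "GOARCH=arm", "GOARM=7", "CGO_ENABLED=0")
	cmd.Stdout, cmd.Stderr = os.Stdout, os.Stderr
	if err := cmd.Run(); err != nil {
		os.Exit(1)
	}
}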

@ -74,7 +74,7 @@ func BuildImage(version string, arch config.Architecture, grafanaDir string, use
tagSuffix := ""
if useUbuntu {
libc = ""
baseImage = fmt.Sprintf("%subuntu:20.04", baseArch)
baseImage = fmt.Sprintf("%subuntu:22.04", baseArch)
tagSuffix = "-ubuntu"
}

@ -3,8 +3,9 @@ package droneutil_test
import (
"testing"
"github.com/grafana/grafana/pkg/build/droneutil"
"github.com/stretchr/testify/require"
"github.com/grafana/grafana/pkg/build/droneutil"
)
func TestGetDroneEvent(t *testing.T) {

@ -15,6 +15,11 @@ const (
flag2 = "flag2"
)
type flagObj struct {
name string
value string
}
func TestRequireListWithEnvFallback(t *testing.T) {
var app = cli.NewApp()
tests := []struct {
@ -73,7 +78,7 @@ func TestRequireStringWithEnvFallback(t *testing.T) {
}{
{
testName: "string present in the context",
ctx: cli.NewContext(app, setFlags(t, flag1, flag2, flag.NewFlagSet("test", flag.ContinueOnError)), nil),
ctx: cli.NewContext(app, setFlags(t, flag.NewFlagSet("test", flag.ContinueOnError), flagObj{name: flag1, value: "a"}), nil),
name: flag1,
envName: "",
expected: "a",
@ -81,7 +86,7 @@ func TestRequireStringWithEnvFallback(t *testing.T) {
},
{
testName: "string present in env",
ctx: cli.NewContext(app, setFlags(t, "", "", flag.NewFlagSet("test", flag.ContinueOnError)), nil),
ctx: cli.NewContext(app, setFlags(t, flag.NewFlagSet("test", flag.ContinueOnError)), nil),
name: flag1,
envName: setEnv(t, flag1, "a"),
expected: "a",
@ -89,7 +94,7 @@ func TestRequireStringWithEnvFallback(t *testing.T) {
},
{
testName: "string absent from both context and env",
ctx: cli.NewContext(app, setFlags(t, "", flag2, flag.NewFlagSet("test", flag.ContinueOnError)), nil),
ctx: cli.NewContext(app, setFlags(t, flag.NewFlagSet("test", flag.ContinueOnError), flagObj{name: flag2, value: "b"}), nil),
name: flag1,
envName: "",
expected: "",
@ -120,13 +125,12 @@ func applyFlagSet(t *testing.T, aFlag, aValue string) *flag.FlagSet {
return set
}
func setFlags(t *testing.T, flag1, flag2 string, flagSet *flag.FlagSet) *flag.FlagSet {
func setFlags(t *testing.T, flagSet *flag.FlagSet, flags ...flagObj) *flag.FlagSet {
t.Helper()
if flag1 != "" {
flagSet.StringVar(&flag1, "flag1", "a", "")
}
if flag2 != "" {
flagSet.StringVar(&flag2, "flag2", "b", "")
for _, f := range flags {
if f.name != "" {
flagSet.StringVar(&f.name, f.name, f.value, "")
}
}
return flagSet
}
@ -135,9 +139,6 @@ func setEnv(t *testing.T, key, value string) string {
t.Helper()
os.Clearenv()
err := os.Setenv(key, value)
if err != nil {
require.NoError(t, err)
}
t.Setenv(key, value)
return key
}

@ -3,9 +3,9 @@ package env_test
import (
"testing"
"github.com/grafana/grafana/pkg/build/env"
"github.com/stretchr/testify/require"
"github.com/grafana/grafana/pkg/build/env"
)
func TestLookup(t *testing.T) {

@ -21,6 +21,11 @@ type packageJson struct {
Version string `json:"version"`
}
type flagObj struct {
name string
value string
}
var app = cli.NewApp()
func TestGetConfig(t *testing.T) {
@ -32,35 +37,35 @@ func TestGetConfig(t *testing.T) {
wantErr bool
}{
{
ctx: cli.NewContext(app, setFlags(t, jobs, githubToken, "", flag.NewFlagSet("flagSet", flag.ContinueOnError)), nil),
ctx: cli.NewContext(app, setFlags(t, flag.NewFlagSet("flagSet", flag.ContinueOnError), flagObj{name: jobs, value: "2"}, flagObj{name: githubToken, value: "token"}), nil),
name: "package.json matches tag",
packageJsonVersion: "10.0.0",
metadata: config.Metadata{GrafanaVersion: "10.0.0", ReleaseMode: config.ReleaseMode{Mode: config.TagMode}},
wantErr: false,
},
{
ctx: cli.NewContext(app, setFlags(t, jobs, githubToken, "", flag.NewFlagSet("flagSet", flag.ContinueOnError)), nil),
ctx: cli.NewContext(app, setFlags(t, flag.NewFlagSet("flagSet", flag.ContinueOnError), flagObj{name: jobs, value: "2"}, flagObj{name: githubToken, value: "token"}), nil),
name: "custom tag, package.json doesn't match",
packageJsonVersion: "10.0.0",
metadata: config.Metadata{GrafanaVersion: "10.0.0-abcd123pre", ReleaseMode: config.ReleaseMode{Mode: config.TagMode}},
wantErr: false,
},
{
ctx: cli.NewContext(app, setFlags(t, jobs, githubToken, "", flag.NewFlagSet("flagSet", flag.ContinueOnError)), nil),
ctx: cli.NewContext(app, setFlags(t, flag.NewFlagSet("flagSet", flag.ContinueOnError), flagObj{name: jobs, value: "2"}, flagObj{name: githubToken, value: "token"}), nil),
name: "package.json doesn't match tag",
packageJsonVersion: "10.1.0",
metadata: config.Metadata{GrafanaVersion: "10.0.0", ReleaseMode: config.ReleaseMode{Mode: config.TagMode}},
wantErr: true,
},
{
ctx: cli.NewContext(app, setFlags(t, jobs, githubToken, "", flag.NewFlagSet("flagSet", flag.ContinueOnError)), nil),
ctx: cli.NewContext(app, setFlags(t, flag.NewFlagSet("flagSet", flag.ContinueOnError), flagObj{name: jobs, value: "2"}, flagObj{name: githubToken, value: "token"}), nil),
name: "test tag event, check should be skipped",
packageJsonVersion: "10.1.0",
metadata: config.Metadata{GrafanaVersion: "10.1.0-test", ReleaseMode: config.ReleaseMode{Mode: config.TagMode, IsTest: true}},
wantErr: false,
},
{
ctx: cli.NewContext(app, setFlags(t, jobs, githubToken, buildID, flag.NewFlagSet("flagSet", flag.ContinueOnError)), nil),
ctx: cli.NewContext(app, setFlags(t, flag.NewFlagSet("flagSet", flag.ContinueOnError), flagObj{name: jobs, value: "2"}, flagObj{name: githubToken, value: "token"}, flagObj{name: buildID, value: "12345"}), nil),
name: "non-tag event",
packageJsonVersion: "10.1.0-pre",
metadata: config.Metadata{GrafanaVersion: "10.1.0-12345pre", ReleaseMode: config.ReleaseMode{Mode: config.PullRequestMode}},
@ -85,16 +90,12 @@ func TestGetConfig(t *testing.T) {
}
}
func setFlags(t *testing.T, flag1, flag2, flag3 string, flagSet *flag.FlagSet) *flag.FlagSet {
func setFlags(t *testing.T, flagSet *flag.FlagSet, flags ...flagObj) *flag.FlagSet {
t.Helper()
if flag1 != "" {
flagSet.StringVar(&flag1, jobs, "2", "")
}
if flag2 != "" {
flagSet.StringVar(&flag2, githubToken, "token", "")
}
if flag3 != "" {
flagSet.StringVar(&flag3, buildID, "12345", "")
for _, f := range flags {
if f.name != "" {
flagSet.StringVar(&f.name, f.name, f.value, "")
}
}
return flagSet
}

@ -5,9 +5,10 @@ import (
"runtime"
"testing"
"github.com/grafana/grafana/pkg/build/fsutil"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
"github.com/grafana/grafana/pkg/build/fsutil"
)
func TestCopyFile(t *testing.T) {

@ -3,8 +3,9 @@ package fsutil_test
import (
"testing"
"github.com/grafana/grafana/pkg/build/fsutil"
"github.com/stretchr/testify/require"
"github.com/grafana/grafana/pkg/build/fsutil"
)
func TestExists_NonExistent(t *testing.T) {

@ -16,10 +16,11 @@ import (
"time"
"cloud.google.com/go/storage"
"github.com/grafana/grafana/pkg/build/fsutil"
"github.com/grafana/grafana/pkg/build/gcloud"
"google.golang.org/api/iterator"
"google.golang.org/api/option"
"github.com/grafana/grafana/pkg/build/fsutil"
"github.com/grafana/grafana/pkg/build/gcloud"
)
var (
@ -388,11 +389,17 @@ func GetLatestMainBuild(ctx context.Context, bucket *storage.BucketHandle, path
return "", ErrorNilBucket
}
it := bucket.Objects(ctx, &storage.Query{
query := &storage.Query{
Prefix: path,
})
}
err := query.SetAttrSelection([]string{"Name", "Generation"})
if err != nil {
return "", fmt.Errorf("failed to set attribute selector, err: %q", err)
}
it := bucket.Objects(ctx, query)
var files []string
var oldGeneration int64
for {
attrs, err := it.Next()
if errors.Is(err, iterator.Done) {
@ -401,13 +408,17 @@ func GetLatestMainBuild(ctx context.Context, bucket *storage.BucketHandle, path
if err != nil {
return "", fmt.Errorf("failed to iterate through bucket, err: %w", err)
}
files = append(files, attrs.Name)
if attrs.Generation >= oldGeneration {
files = append([]string{attrs.Name}, files...)
oldGeneration = attrs.Generation
} else {
files = append(files, attrs.Name)
}
}
var latestVersion string
for i := len(files) - 1; i >= 0; i-- {
captureVersion := regexp.MustCompile(`(\d+\.\d+\.\d+-\d+pre)`)
for i := 0; i < len(files); i++ {
captureVersion := regexp.MustCompile(`(\d+\.\d+\.\d+-\d+)`)
if captureVersion.MatchString(files[i]) {
latestVersion = captureVersion.FindString(files[i])
break
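Editor's aside, not part of the commit: the capture pattern in GetLatestMainBuild above was loosened from (\d+\.\d+\.\d+-\d+pre) to (\d+\.\d+\.\d+-\d+), so version extraction works whether or not the object name carries the "pre" suffix. A small self-contained check (the object names are hypothetical):

package main

import (
	"fmt"
	"regexp"
)

func main() {
	captureVersion := regexp.MustCompile(`(\d+\.\d+\.\d+-\d+)`)
	for _, name := range []string{
		"grafana_10.0.0-123pre_amd64.deb", // name with the old "pre" suffix
		"grafana_10.0.0-123_amd64.deb",    // name without it
	} {
		fmt.Println(captureVersion.FindString(name)) // both print 10.0.0-123
	}
}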

@ -15,3 +15,16 @@ func getGitSha() string {
}
return string(v)
}
func getGitEnterpriseSha() string {
// supporting the old way of dev setup
v, err := runError("git", "-C", "../grafana-enterprise", "rev-parse", "--short", "HEAD")
if err != nil {
// supporting the new way of dev setup
v, err = runError("git", "-C", "..", "rev-parse", "--short", "HEAD")
if err != nil {
return ""
}
}
return string(v)
}
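Editor's aside, not part of the commit: getGitEnterpriseSha above (like the matching logic in GrafanaRevision) walks a fixed list of candidate checkouts — ../grafana-enterprise for the old dev setup, then .. for the new one — and takes the first directory where git answers. A hypothetical helper expressing the same chain as a loop:

package main

import (
	"fmt"
	"os/exec"
	"strings"
)

// firstShortSHA returns the short HEAD commit of the first directory in which
// "git rev-parse" succeeds, or an empty string if none of them work.
func firstShortSHA(dirs ...string) string {
	for _, dir := range dirs {
		out, err := exec.Command("git", "-C", dir, "rev-parse", "--short", "HEAD").Output()
		if err == nil {
			return strings.TrimSpace(string(out))
		}
	}
	return ""
}

func main() {
	fmt.Println(firstShortSHA("../grafana-enterprise", ".."))
}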

@ -8,8 +8,9 @@ import (
"regexp"
"github.com/google/go-github/v45/github"
"github.com/grafana/grafana/pkg/build/stringutil"
"golang.org/x/oauth2"
"github.com/grafana/grafana/pkg/build/stringutil"
)
const (

@ -6,8 +6,9 @@ import (
"testing"
"github.com/google/go-github/v45/github"
"github.com/grafana/grafana/pkg/build/git"
"github.com/stretchr/testify/require"
"github.com/grafana/grafana/pkg/build/git"
)
type TestChecksService struct {

@ -6,8 +6,9 @@ import (
"testing"
"github.com/google/go-github/v45/github"
"github.com/grafana/grafana/pkg/build/git"
"github.com/stretchr/testify/require"
"github.com/grafana/grafana/pkg/build/git"
)
type TestLabelsService struct {

@ -3,8 +3,9 @@ package git_test
import (
"testing"
"github.com/grafana/grafana/pkg/build/git"
"github.com/stretchr/testify/assert"
"github.com/grafana/grafana/pkg/build/git"
)
func TestPRCheckRegexp(t *testing.T) {

@ -23,13 +23,19 @@ const (
)
func GrafanaLDFlags(version string, r config.Revision) []string {
return []string{
cmd := []string{
"-w",
fmt.Sprintf("-X main.version=%s", version),
fmt.Sprintf("-X main.commit=%s", r.SHA256),
fmt.Sprintf("-X main.buildstamp=%d", r.Timestamp),
fmt.Sprintf("-X main.buildBranch=%s", r.Branch),
}
if r.EnterpriseCommit != "" {
cmd = append(cmd, fmt.Sprintf("-X main.enterpriseCommit=%s", r.EnterpriseCommit))
}
return cmd
}
// BinaryFolder returns the path to where the Grafana binary is build given the provided arguments.
@ -99,7 +105,7 @@ func BuildGrafanaBinary(ctx context.Context, name, version string, args BuildArg
descriptor := GrafanaDescriptor(opts)
log.Printf("Building %q for %s\nwith env: %v", binary, descriptor, opts.Env())
log.Printf("Building %q for %s", binary, descriptor)
opts.LdFlags = append(args.LdFlags, GrafanaLDFlags(version, revision)...)

@ -47,7 +47,7 @@ func GetLernaVersion(grafanaDir string) (string, error) {
if err != nil {
return "", fmt.Errorf("failed to read %q: %w", lernaJSONPath, err)
}
pkgObj := map[string]interface{}{}
pkgObj := map[string]any{}
if err := json.Unmarshal(lernaJSONB, &pkgObj); err != nil {
return "", fmt.Errorf("failed decoding %q: %w", lernaJSONPath, err)
}

@ -5,10 +5,9 @@ import (
"encoding/json"
"fmt"
"log"
"net/http"
"strconv"
"time"
"net/http"
)
type payload struct {

@ -21,11 +21,11 @@ const NpmArtifactDir = "./npm-artifacts"
var packages = []string{
"@grafana/ui",
"@grafana/data",
"@grafana/toolkit",
"@grafana/runtime",
"@grafana/e2e",
"@grafana/e2e-selectors",
"@grafana/schema",
"@grafana/flamegraph",
}
// PublishNpmPackages will publish local NPM packages to NPM registry.
@ -117,11 +117,11 @@ func FetchNpmPackages(ctx context.Context, tag, bucketName string) error {
// Latest and next is 9.1.6.
// 9.2.0-beta1 is released, the latest should stay on 9.1.6, next should point to 9.2.0-beta1
// No move of dist-tags
// 9.1.7 is relased, the latest should point to 9.1.7, next should stay to 9.2.0-beta1
// 9.1.7 is released, the latest should point to 9.1.7, next should stay to 9.2.0-beta1
// No move of dist-tags
// Next week 9.2.0-beta2 is released, the latest should point to 9.1.7, next should point to 9.2.0-beta2
// No move of dist-tags
// In two weeks 9.2.0 stable is relased, the latest and next should point to 9.2.0.
// In two weeks 9.2.0 stable is released, the latest and next should point to 9.2.0.
// The next dist-tag is moved to point to 9.2.0.
//
// 3. Releasing an older stable than the current stable
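Editor's aside, not part of the commit: the comments above describe when the npm dist-tags move — "latest" only advances to a newer stable release, while "next" advances to anything newer than what it currently points at. A sketch of those two rules using golang.org/x/mod/semver (an assumption for illustration; the build code itself does not use that package):

package main

import (
	"fmt"

	"golang.org/x/mod/semver"
)

// moveLatest reports whether "latest" should move: only for stable releases
// newer than the current latest.
func moveLatest(currentLatest, candidate string) bool {
	return semver.Prerelease("v"+candidate) == "" && semver.Compare("v"+candidate, "v"+currentLatest) > 0
}

// moveNext reports whether "next" should move: for any version newer than the
// one it currently points at, prerelease or not.
func moveNext(currentNext, candidate string) bool {
	return semver.Compare("v"+candidate, "v"+currentNext) > 0
}

func main() {
	// Scenario from the comments: latest and next start at 9.1.6.
	fmt.Println(moveLatest("9.1.6", "9.2.0-beta1"), moveNext("9.1.6", "9.2.0-beta1")) // false true
	fmt.Println(moveLatest("9.1.6", "9.1.7"), moveNext("9.2.0-beta1", "9.1.7"))       // true false
	fmt.Println(moveLatest("9.1.7", "9.2.0"), moveNext("9.2.0-beta1", "9.2.0"))       // true true
}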

@ -381,6 +381,9 @@ func executeFPM(options linuxPackageOptions, packageRoot, srcDir string) error {
"--vendor", vendor,
"-a", string(options.packageArch),
}
if options.prermSrc != "" {
args = append(args, "--before-remove", options.prermSrc)
}
if options.edition == config.EditionEnterprise || options.edition == config.EditionEnterprise2 || options.goArch == config.ArchARMv6 {
args = append(args, "--conflicts", "grafana")
}
@ -530,7 +533,7 @@ func copyPlugins(ctx context.Context, v config.Variant, grafanaDir, tmpDir strin
if err != nil {
return fmt.Errorf("failed to read %q: %w", filepath.Join(srcDir, "plugin.json"), err)
}
var plugJSON map[string]interface{}
var plugJSON map[string]any
if err := json.Unmarshal(jsonB, &plugJSON); err != nil {
return err
}
@ -729,6 +732,7 @@ func realPackageVariant(ctx context.Context, v config.Variant, edition config.Ed
initdScriptFilePath: "/etc/init.d/grafana-server",
systemdServiceFilePath: "/usr/lib/systemd/system/grafana-server.service",
postinstSrc: filepath.Join(grafanaDir, "packaging", "deb", "control", "postinst"),
prermSrc: filepath.Join(grafanaDir, "packaging", "deb", "control", "prerm"),
initdScriptSrc: filepath.Join(grafanaDir, "packaging", "deb", "init.d", "grafana-server"),
defaultFileSrc: filepath.Join(grafanaDir, "packaging", "deb", "default", "grafana-server"),
systemdFileSrc: filepath.Join(grafanaDir, "packaging", "deb", "systemd", "grafana-server.service"),
@ -767,8 +771,7 @@ func realPackageVariant(ctx context.Context, v config.Variant, edition config.Ed
defaultFileSrc: filepath.Join(grafanaDir, "packaging", "rpm", "sysconfig", "grafana-server"),
systemdFileSrc: filepath.Join(grafanaDir, "packaging", "rpm", "systemd", "grafana-server.service"),
wrapperFilePath: filepath.Join(grafanaDir, "packaging", "wrappers"),
// chkconfig is depended on since our systemd service wraps a SysV init script, and that requires chkconfig
depends: []string{"/sbin/service", "chkconfig", "fontconfig", "freetype"},
depends: []string{"/sbin/service", "fontconfig", "freetype"},
}); err != nil {
return err
}
@ -845,6 +848,7 @@ type linuxPackageOptions struct {
initdScriptFilePath string
systemdServiceFilePath string
postinstSrc string
prermSrc string
initdScriptSrc string
defaultFileSrc string
systemdFileSrc string

@ -3,9 +3,10 @@ package packaging_test
import (
"testing"
"github.com/stretchr/testify/assert"
"github.com/grafana/grafana/pkg/build/config"
"github.com/grafana/grafana/pkg/build/packaging"
"github.com/stretchr/testify/assert"
)
func TestPackageRegexp(t *testing.T) {

@ -4,6 +4,7 @@ import (
"encoding/json"
"fmt"
"os"
"path/filepath"
"strings"
"time"
)
@ -14,7 +15,7 @@ type PackageJSON struct {
// Opens the package.json file in the provided directory and returns a struct that represents its contents
func OpenPackageJSON(dir string) (PackageJSON, error) {
reader, err := os.Open("package.json")
reader, err := os.Open(filepath.Clean(dir + "/package.json"))
if err != nil {
return PackageJSON{}, err
}
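Editor's aside, not part of the commit: with the directory argument now honoured, OpenPackageJSON can be pointed at any checkout instead of relying on the current working directory. A hypothetical caller; the import path and package name are assumed from the file's location (pkg/build/version.go), and the checkout path is made up:

package main

import (
	"fmt"
	"log"

	"github.com/grafana/grafana/pkg/build" // assumed import path for the package shown above
)

func main() {
	pkg, err := build.OpenPackageJSON("/tmp/grafana") // hypothetical checkout path
	if err != nil {
		log.Fatal(err)
	}
	fmt.Printf("%+v\n", pkg)
}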

@ -1,628 +0,0 @@
events/release.star,6652
ver_mode =ver_mode64,1602
release_trigger =release_trigger65,1623
def store_npm_packages_step():store_npm_packages_step74,1752
def retrieve_npm_packages_step():retrieve_npm_packages_step90,2193
def release_npm_packages_step():release_npm_packages_step107,2663
def oss_pipelines(ver_mode = ver_mode, trigger = release_trigger):oss_pipelines123,3076
environment =environment135,3492
edition =edition136,3529
services =services137,3549
volumes =volumes138,3609
package_steps =package_steps139,3659
publish_steps =publish_steps140,3682
should_publish =should_publish141,3705
should_upload =should_upload142,3748
init_steps =init_steps143,3818
build_steps =build_steps152,4033
integration_test_steps =integration_test_steps159,4342
build_storybook =build_storybook182,5254
publish_step =publish_step190,5674
store_npm_step =store_npm_step191,5758
windows_package_steps =windows_package_steps196,5957
windows_pipeline =windows_pipeline198,6044
name =name199,6077
edition =edition200,6127
trigger =trigger201,6154
steps =steps202,6181
platform =platform203,6256
depends_on =depends_on204,6286
environment =environment207,6393
pipelines =pipelines209,6434
name =name211,6470
edition =edition212,6550
trigger =trigger213,6581
services =services214,6612
steps =steps215,6639
environment =environment216,6717
volumes =volumes217,6756
name =name225,6970
edition =edition226,7038
trigger =trigger227,7073
services =services228,7108
steps =steps229,7145
environment =environment230,7329
volumes =volumes231,7372
deps =deps234,7433
def enterprise_pipelines(ver_mode = ver_mode, trigger = release_trigger):enterprise_pipelines247,7856
environment =environment259,8284
edition =edition260,8328
services =services261,8355
volumes =volumes262,8415
package_steps =package_steps263,8465
publish_steps =publish_steps264,8488
should_publish =should_publish265,8511
should_upload =should_upload266,8554
include_enterprise =include_enterprise267,8624
edition2 =edition2268,8673
init_steps =init_steps269,8702
build_steps =build_steps277,8909
integration_test_steps =integration_test_steps284,9218
build_storybook =build_storybook312,10299
publish_step =publish_step324,10892
store_npm_step =store_npm_step325,10976
windows_package_steps =windows_package_steps330,11175
step =step333,11284
deps_on_clone_enterprise_step =deps_on_clone_enterprise_step337,11418
windows_pipeline =windows_pipeline347,11746
name =name348,11779
edition =edition349,11836
trigger =trigger350,11863
steps =steps351,11890
platform =platform352,11965
depends_on =depends_on353,11995
environment =environment356,12109
pipelines =pipelines358,12150
name =name360,12186
edition =edition361,12273
trigger =trigger362,12304
services =services363,12335
steps =steps364,12362
environment =environment365,12440
volumes =volumes366,12479
name =name374,12711
edition =edition375,12786
trigger =trigger376,12821
services =services377,12856
steps =steps378,12893
environment =environment379,13213
volumes =volumes380,13256
deps =deps383,13317
def enterprise2_pipelines(prefix = "", ver_mode = ver_mode, trigger = release_trigger):enterprise2_pipelines397,13769
environment =environment412,14364
edition =edition415,14424
volumes =volumes416,14451
package_steps =package_steps417,14501
publish_steps =publish_steps418,14524
should_publish =should_publish419,14547
should_upload =should_upload420,14590
include_enterprise =include_enterprise421,14660
edition2 =edition2422,14709
init_steps =init_steps423,14738
build_steps =build_steps431,14945
fetch_images =fetch_images442,15355
upload_cdn =upload_cdn444,15497
step =step458,16187
deps_on_clone_enterprise_step =deps_on_clone_enterprise_step462,16321
pipelines =pipelines472,16608
name =name474,16644
edition =edition475,16742
trigger =trigger476,16773
services =services477,16804
steps =steps478,16831
volumes =volumes479,16909
environment =environment480,16940
def publish_artifacts_step(mode):publish_artifacts_step486,17019
security =security487,17053
security =security489,17098
def publish_artifacts_pipelines(mode):publish_artifacts_pipelines501,17538
trigger =trigger502,17577
steps =steps506,17655
name =name512,17768
trigger =trigger513,17820
steps =steps514,17847
edition =edition515,17870
environment =environment516,17895
def publish_packages_pipeline():publish_packages_pipeline519,17945
trigger =trigger526,18162
oss_steps =oss_steps530,18244
enterprise_steps =enterprise_steps538,18560
deps =deps545,18903
name =name552,19062
trigger =trigger553,19101
steps =steps554,19128
edition =edition555,19155
depends_on =depends_on556,19180
environment =environment557,19207
name =name559,19266
trigger =trigger560,19312
steps =steps561,19339
edition =edition562,19373
depends_on =depends_on563,19398
environment =environment564,19425
def publish_npm_pipelines(mode):publish_npm_pipelines567,19482
trigger =trigger568,19515
steps =steps572,19593
name =name580,19772
trigger =trigger581,19827
steps =steps582,19854
edition =edition583,19877
environment =environment584,19902
def artifacts_page_pipeline():artifacts_page_pipeline587,19952
trigger =trigger588,19983
name =name593,20087
trigger =trigger594,20128
steps =steps595,20155
edition =edition596,20220
environment =environment597,20245
def get_e2e_suffix():get_e2e_suffix600,20295
events/cron.star,1016
aquasec_trivy_image =aquasec_trivy_image8,209
def cronjobs(edition):cronjobs10,255
grafana_com_nightly_pipeline =grafana_com_nightly_pipeline11,278
cronName =cronName12,332
name =name13,374
steps =steps14,412
def cron_job_pipeline(cronName, name, steps):cron_job_pipeline24,773
def scan_docker_image_pipeline(edition, tag):scan_docker_image_pipeline43,1175
edition =edition55,1530
edition =edition57,1579
docker_image =docker_image59,1608
cronName =cronName62,1695
name =name63,1725
steps =steps64,1775
def scan_docker_image_unkown_low_medium_vulnerabilities_step(docker_image):scan_docker_image_unkown_low_medium_vulnerabilities_step71,2047
def scan_docker_image_high_critical_vulnerabilities_step(docker_image):scan_docker_image_high_critical_vulnerabilities_step80,2353
def slack_job_failed_step(channel, image):slack_job_failed_step89,2646
def post_to_grafana_com_step():post_to_grafana_com_step103,3069
events/main.star,633
ver_mode =ver_mode49,966
trigger =trigger50,984
def main_pipelines(edition):main_pipelines62,1168
drone_change_trigger =drone_change_trigger63,1197
pipelines =pipelines79,1513
name =name89,1951
slack_channel =slack_channel90,1994
trigger =trigger91,2045
template =template92,2089
secret =secret93,2135
name =name97,2276
slack_channel =slack_channel98,2310
trigger =trigger99,2366
depends_on =depends_on100,2425
template =template101,2563
secret =secret102,2604
events/pr.star,252
ver_mode =ver_mode48,997
trigger =trigger49,1013
def pr_pipelines(edition):pr_pipelines62,1198
def get_pr_trigger(include_paths = None, exclude_paths = None):get_pr_trigger76,2396
paths_ex =paths_ex91,3080
paths_in =paths_in92,3115
services/services.star,225
def integration_test_services_volumes():integration_test_services_volumes5,79
def integration_test_services(edition):integration_test_services14,292
services =services15,332
def ldap_service():ldap_service59,1616
utils/utils.star,561
failure_template =failure_template11,191
drone_change_template =drone_change_template12,509
services =services19,932
platform =platform20,955
depends_on =depends_on21,983
environment =environment22,1008
volumes =volumes23,1036
platform_conf =platform_conf50,2166
platform_conf =platform_conf62,2534
pipeline =pipeline70,2713
def notify_pipeline(name, slack_channel, trigger, depends_on = [], template = None, secret = None):notify_pipeline105,3545
trigger =trigger106,3645
pipelines/trigger_downstream.star,440
trigger =trigger14,249
def enterprise_downstream_pipeline(edition, ver_mode):enterprise_downstream_pipeline26,433
environment =environment27,488
steps =steps28,527
deps =deps29,587
name =name31,672
edition =edition32,714
trigger =trigger33,741
services =services34,768
steps =steps35,791
depends_on =depends_on36,814
environment =environment37,841
pipelines/verify_starlark.star,323
def verify_starlark(trigger, ver_mode):verify_starlark17,305
environment =environment18,345
steps =steps19,382
name =name26,546
edition =edition27,600
trigger =trigger28,625
services =services29,652
steps =steps30,675
environment =environment31,698
pipelines/build.star,508
def build_e2e(trigger, ver_mode, edition):build_e2e39,936
environment =environment50,1096
variants =variants51,1135
init_steps =init_steps52,1219
build_steps =build_steps61,1491
publish_suffix =publish_suffix107,4049
publish_suffix =publish_suffix109,4100
name =name112,4158
edition =edition113,4224
environment =environment114,4249
services =services115,4284
steps =steps116,4307
trigger =trigger117,4349
pipelines/shellcheck.star,386
trigger =trigger15,235
def shellcheck_step():shellcheck_step31,483
def shellcheck_pipeline():shellcheck_pipeline43,725
environment =environment44,752
steps =steps45,789
name =name50,886
edition =edition51,918
trigger =trigger52,943
services =services53,970
steps =steps54,993
environment =environment55,1016
pipelines/verify_drone.star,317
def verify_drone(trigger, ver_mode):verify_drone17,293
environment =environment18,330
steps =steps19,367
name =name26,528
edition =edition27,579
trigger =trigger28,604
services =services29,631
steps =steps30,654
environment =environment31,677
pipelines/test_backend.star,474
def test_backend(trigger, ver_mode, edition = "oss"):test_backend23,463
environment =environment35,882
init_steps =init_steps36,921
test_steps =test_steps46,1291
pipeline_name =pipeline_name51,1387
pipeline_name =pipeline_name53,1492
name =name55,1584
edition =edition56,1614
trigger =trigger57,1641
services =services58,1668
steps =steps59,1691
environment =environment60,1732
pipelines/lint_frontend.star,415
def lint_frontend_pipeline(trigger, ver_mode):lint_frontend_pipeline16,260
environment =environment26,546
yarn_step =yarn_step27,583
init_steps =init_steps29,660
test_steps =test_steps33,736
name =name37,812
edition =edition38,864
trigger =trigger39,889
services =services40,916
steps =steps41,939
environment =environment42,980
pipelines/docs.star,494
docs_paths =docs_paths19,383
def docs_pipelines(edition, ver_mode, trigger):docs_pipelines28,511
environment =environment29,559
steps =steps30,598
name =name40,815
edition =edition41,858
trigger =trigger42,885
services =services43,912
steps =steps44,935
environment =environment45,958
def lint_docs():lint_docs48,1000
def trigger_docs_main():trigger_docs_main63,1328
def trigger_docs_pr():trigger_docs_pr72,1478
pipelines/test_frontend.star,476
def test_frontend(trigger, ver_mode, edition = "oss"):test_frontend20,374
environment =environment32,794
init_steps =init_steps33,833
test_steps =test_steps41,1102
pipeline_name =pipeline_name45,1205
pipeline_name =pipeline_name47,1311
name =name49,1404
edition =edition50,1434
trigger =trigger51,1461
services =services52,1488
steps =steps53,1511
environment =environment54,1552
pipelines/integration_tests.star,483
def integration_tests(trigger, ver_mode, edition):integration_tests26,542
environment =environment37,900
services =services38,939
volumes =volumes39,989
init_steps =init_steps40,1039
test_steps =test_steps48,1282
name =name54,1412
edition =edition55,1468
trigger =trigger56,1493
services =services57,1520
steps =steps58,1549
environment =environment59,1590
volumes =volumes60,1625
pipelines/windows.star,954
def windows(trigger, edition, ver_mode):windows17,339
environment =environment29,798
init_cmds =init_cmds30,837
steps =steps38,1205
bucket =bucket49,1497
ver_part =ver_part51,1590
dir =dir52,1628
dir =dir54,1670
bucket =bucket55,1695
build_no =build_no56,1736
ver_part =ver_part57,1780
installer_commands =installer_commands58,1842
committish =committish100,3763
committish =committish102,3846
committish =committish104,3906
download_grabpl_step_cmds =download_grabpl_step_cmds107,4057
clone_cmds =clone_cmds113,4363
name =name146,5711
edition =edition147,5742
trigger =trigger148,5769
steps =steps149,5830
depends_on =depends_on150,5889
platform =platform151,6007
environment =environment152,6037
pipelines/lint_backend.star,418
def lint_backend_pipeline(trigger, ver_mode):lint_backend_pipeline18,306
environment =environment28,590
wire_step =wire_step29,627
init_steps =init_steps31,704
test_steps =test_steps36,809
name =name43,959
edition =edition44,1010
trigger =trigger45,1035
services =services46,1062
steps =steps47,1085
environment =environment48,1126
pipelines/publish_images.star,998
def publish_image_steps(edition, mode, docker_repo):publish_image_steps17,303
additional_docker_repo =additional_docker_repo31,922
additional_docker_repo =additional_docker_repo33,979
steps =steps34,1034
def publish_image_pipelines_public():publish_image_pipelines_public45,1369
mode =mode51,1521
trigger =trigger52,1541
name =name57,1641
trigger =trigger58,1694
steps =steps59,1721
edition =edition60,1813
environment =environment61,1835
name =name63,1894
trigger =trigger64,1954
steps =steps65,1981
edition =edition66,2091
environment =environment67,2113
def publish_image_pipelines_security():publish_image_pipelines_security70,2170
mode =mode71,2210
trigger =trigger72,2232
name =name77,2332
trigger =trigger78,2392
steps =steps79,2419
edition =edition80,2529
environment =environment81,2551
steps/lib.star,8579
grabpl_version =grabpl_version7,181
build_image =build_image8,208
publish_image =publish_image9,254
deploy_docker_image =deploy_docker_image10,304
alpine_image =alpine_image11,380
curl_image =curl_image12,411
windows_image =windows_image13,452
wix_image =wix_image14,501
go_image =go_image15,536
disable_tests =disable_tests17,564
trigger_oss =trigger_oss18,586
def slack_step(channel, template, secret):slack_step24,653
def yarn_install_step(edition = "oss"):yarn_install_step35,918
deps =deps36,958
deps =deps38,1004
def wire_install_step():wire_install_step48,1222
def identify_runner_step(platform = "linux"):identify_runner_step60,1454
def clone_enterprise_step(ver_mode):clone_enterprise_step78,1916
committish =committish87,2193
committish =committish89,2268
committish =committish91,2317
def init_enterprise_step(ver_mode):init_enterprise_step105,2747
source_commit =source_commit115,3098
source_commit =source_commit117,3151
environment =environment118,3191
token =token121,3280
environment =environment123,3369
token =token126,3458
environment =environment128,3518
token =token129,3543
def download_grabpl_step(platform = "linux"):download_grabpl_step148,4147
def lint_drone_step():lint_drone_step173,4973
def lint_starlark_step():lint_starlark_step185,5216
def enterprise_downstream_step(edition, ver_mode):enterprise_downstream_step206,6000
repo =repo219,6482
step =step225,6623
def lint_backend_step():lint_backend_step247,7248
def benchmark_ldap_step():benchmark_ldap_step265,7713
def build_storybook_step(edition, ver_mode):build_storybook_step278,8087
def store_storybook_step(edition, ver_mode, trigger = None):store_storybook_step300,8743
commands =commands314,9202
commands =commands323,9521
step =step325,9593
when_cond =when_cond338,10125
step =step346,10330
def e2e_tests_artifacts(edition):e2e_tests_artifacts349,10391
def upload_cdn_step(edition, ver_mode, trigger = None):upload_cdn_step386,12378
deps =deps397,12763
step =step407,12970
step =step420,13423
def build_backend_step(edition, ver_mode, variants = None):build_backend_step423,13482
variants_str =variants_str437,14070
variants_str =variants_str439,14109
cmds =cmds443,14256
build_no =build_no449,14418
cmds =cmds450,14461
def build_frontend_step(edition, ver_mode):build_frontend_step468,14906
build_no =build_no478,15246
cmds =cmds482,15356
cmds =cmds487,15505
def build_frontend_package_step(edition, ver_mode):build_frontend_package_step505,15960
build_no =build_no515,16312
cmds =cmds519,16422
cmds =cmds524,16580
def build_plugins_step(edition, ver_mode):build_plugins_step542,17053
env =env544,17121
env =env548,17220
def test_backend_step():test_backend_step563,17607
def test_backend_integration_step():test_backend_integration_step575,17880
def betterer_frontend_step(edition = "oss"):betterer_frontend_step587,18187
deps =deps596,18427
def test_frontend_step(edition = "oss"):test_frontend_step609,18728
deps =deps618,18962
def lint_frontend_step():lint_frontend_step634,19343
def test_a11y_frontend_step(ver_mode, edition, port = 3001):test_a11y_frontend_step652,19793
commands =commands664,20279
failure =failure667,20345
failure =failure672,20483
def frontend_metrics_step(edition, trigger = None):frontend_metrics_step693,21146
step =step706,21507
step =step721,22007
def codespell_step():codespell_step724,22066
def package_step(edition, ver_mode, variants = None):package_step736,22468
deps =deps750,23006
variants_str =variants_str757,23167
variants_str =variants_str759,23206
sign_args =sign_args762,23332
env =env763,23362
test_args =test_args769,23628
sign_args =sign_args771,23661
env =env772,23684
test_args =test_args773,23703
cmds =cmds777,23829
build_no =build_no784,24036
cmds =cmds785,24079
def grafana_server_step(edition, port = 3001):grafana_server_step798,24459
package_file_pfx =package_file_pfx808,24729
package_file_pfx =package_file_pfx810,24788
package_file_pfx =package_file_pfx812,24889
environment =environment814,24938
def e2e_tests_step(suite, edition, port = 3001, tries = None):e2e_tests_step837,25554
cmd =cmd838,25617
def cloud_plugins_e2e_tests_step(suite, edition, cloud, trigger = None):cloud_plugins_e2e_tests_step856,26186
environment =environment869,26649
when =when870,26670
when =when872,26700
environment =environment874,26748
when =when882,27129
branch =branch888,27345
step =step889,27401
step =step901,27822
def build_docs_website_step():build_docs_website_step904,27874
def copy_packages_for_docker_step(edition = None):copy_packages_for_docker_step916,28272
def build_docker_images_step(edition, archs = None, ubuntu = False, publish = False):build_docker_images_step929,28622
cmd =cmd943,29193
ubuntu_sfx =ubuntu_sfx947,29307
ubuntu_sfx =ubuntu_sfx949,29342
environment =environment955,29468
def fetch_images_step(edition):fetch_images_step979,30079
def publish_images_step(edition, ver_mode, mode, docker_repo, trigger = None):publish_images_step997,30745
name =name1013,31562
docker_repo =docker_repo1014,31585
mode =mode1016,31663
mode =mode1018,31709
environment =environment1020,31728
cmd =cmd1026,31912
deps =deps1029,32041
deps =deps1032,32147
name =name1035,32250
docker_repo =docker_repo1036,32273
cmd =cmd1038,32459
step =step1040,32565
step =step1052,32929
def postgres_integration_tests_step():postgres_integration_tests_step1056,32989
cmds =cmds1057,33028
def mysql_integration_tests_step():mysql_integration_tests_step1079,33850
cmds =cmds1080,33886
def redis_integration_tests_step():redis_integration_tests_step1100,34629
def memcached_integration_tests_step():memcached_integration_tests_step1114,35026
def release_canary_npm_packages_step(edition, trigger = None):release_canary_npm_packages_step1128,35435
step =step1141,35805
step =step1153,36143
def enterprise2_suffix(edition):enterprise2_suffix1156,36202
def upload_packages_step(edition, ver_mode, trigger = None):upload_packages_step1161,36320
deps =deps1176,36816
step =step1184,37036
step =step1195,37471
def publish_grafanacom_step(edition, ver_mode):publish_grafanacom_step1198,37530
cmd =cmd1211,38044
build_no =build_no1215,38188
cmd =cmd1216,38231
def publish_linux_packages_step(edition, package_manager = "deb"):publish_linux_packages_step1239,38866
def get_windows_steps(edition, ver_mode):get_windows_steps1261,39989
init_cmds =init_cmds1270,40281
steps =steps1278,40649
bucket =bucket1289,40941
ver_part =ver_part1291,41034
dir =dir1292,41072
dir =dir1294,41114
bucket =bucket1295,41139
build_no =build_no1296,41180
ver_part =ver_part1297,41224
installer_commands =installer_commands1298,41286
committish =committish1340,43207
committish =committish1342,43290
committish =committish1344,43350
download_grabpl_step_cmds =download_grabpl_step_cmds1347,43501
clone_cmds =clone_cmds1353,43807
def verify_gen_cue_step(edition):verify_gen_cue_step1387,45152
deps =deps1388,45186
def verify_gen_jsonnet_step(edition):verify_gen_jsonnet_step1402,45694
deps =deps1403,45732
def trigger_test_release():trigger_test_release1417,46236
def artifacts_page_step():artifacts_page_step1451,47731
def end_to_end_tests_deps():end_to_end_tests_deps1466,48058
def compile_build_cmd(edition = "oss"):compile_build_cmd1476,48321
dependencies =dependencies1477,48361
dependencies =dependencies1479,48432
def get_trigger_storybook(ver_mode):get_trigger_storybook1492,48780
trigger_storybook =trigger_storybook1500,49031
trigger_storybook =trigger_storybook1502,49088
trigger_storybook =trigger_storybook1506,49168
vault.star,444
pull_secret =pull_secret4,87
github_token =github_token5,120
drone_token =drone_token6,150
prerelease_bucket =prerelease_bucket7,178
gcp_upload_artifacts_key =gcp_upload_artifacts_key8,218
azure_sp_app_id =azure_sp_app_id9,272
azure_sp_app_pw =azure_sp_app_pw10,308
azure_tenant =azure_tenant11,344
def from_secret(secret):from_secret13,375
def vault_secret(name, path, key):vault_secret18,451
def secrets():secrets28,633
version.star,116
ver_mode =ver_mode12,197
trigger =trigger13,225
def version_branch_pipelines():version_branch_pipelines15,268

@ -2,15 +2,11 @@
This module provides functions for cronjob pipelines and steps used within.
"""
load("scripts/drone/vault.star", "from_secret")
load(
"scripts/drone/steps/lib.star",
"compile_build_cmd",
)
load(
"scripts/drone/utils/images.star",
"images",
)
load("scripts/drone/vault.star", "from_secret")
aquasec_trivy_image = "aquasec/trivy:0.21.0"
@ -21,7 +17,6 @@ def cronjobs():
scan_docker_image_pipeline("latest-ubuntu"),
scan_docker_image_pipeline("main-ubuntu"),
scan_build_test_publish_docker_image_pipeline(),
grafana_com_nightly_pipeline(),
]
def authenticate_gcr_step():
@ -59,6 +54,10 @@ def cron_job_pipeline(cronName, name, steps):
"path": "/var/run/docker.sock",
},
},
{
"name": "config",
"temp": {},
},
],
}
@ -117,13 +116,13 @@ def scan_docker_image_unknown_low_medium_vulnerabilities_step(docker_image):
for key in images:
cmds = cmds + ["trivy --exit-code 0 --severity UNKNOWN,LOW,MEDIUM " + images[key]]
else:
cmds = ["trivy --exit-code 0 --severity UNKNOWN,LOW,MEDIUM " + docker_image]
cmds = ["trivy image --exit-code 0 --severity UNKNOWN,LOW,MEDIUM " + docker_image]
return {
"name": "scan-unknown-low-medium-vulnerabilities",
"image": aquasec_trivy_image,
"commands": cmds,
"depends_on": ["authenticate-gcr"],
"volumes": [{"name": "docker", "path": "/var/run/docker.sock"}],
"volumes": [{"name": "docker", "path": "/var/run/docker.sock"}, {"name": "config", "path": "/root/.docker/"}],
}
def scan_docker_image_high_critical_vulnerabilities_step(docker_image):
@ -141,19 +140,22 @@ def scan_docker_image_high_critical_vulnerabilities_step(docker_image):
for key in images:
cmds = cmds + ["trivy --exit-code 1 --severity HIGH,CRITICAL " + images[key]]
else:
cmds = ["trivy --exit-code 1 --severity HIGH,CRITICAL " + docker_image]
cmds = ["trivy image --exit-code 1 --severity HIGH,CRITICAL " + docker_image]
return {
"name": "scan-high-critical-vulnerabilities",
"image": aquasec_trivy_image,
"commands": cmds,
"depends_on": ["authenticate-gcr"],
"volumes": [{"name": "docker", "path": "/var/run/docker.sock"}],
"environment": {
"GOOGLE_APPLICATION_CREDENTIALS": from_secret("gcr_credentials_json"),
},
"volumes": [{"name": "docker", "path": "/var/run/docker.sock"}, {"name": "config", "path": "/root/.docker/"}],
}
def slack_job_failed_step(channel, image):
return {
"name": "slack-notify-failure",
"image": images["plugins_slack_image"],
"image": images["plugins_slack"],
"settings": {
"webhook": from_secret("slack_webhook_backend"),
"channel": channel,
@ -167,7 +169,7 @@ def slack_job_failed_step(channel, image):
def post_to_grafana_com_step():
return {
"name": "post-to-grafana-com",
"image": images["publish_image"],
"image": images["publish"],
"environment": {
"GRAFANA_COM_API_KEY": from_secret("grafana_api_key"),
"GCP_KEY": from_secret("gcp_key"),
@ -175,13 +177,3 @@ def post_to_grafana_com_step():
"depends_on": ["compile-build-cmd"],
"commands": ["./bin/build publish grafana-com --edition oss"],
}
def grafana_com_nightly_pipeline():
return cron_job_pipeline(
cronName = "grafana-com-nightly",
name = "grafana-com-nightly",
steps = [
compile_build_cmd(),
post_to_grafana_com_step(),
],
)
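For context on the trivy change above, a minimal sketch of the commands the corrected step emits when scanning a single image (the image reference is illustrative, not taken from this diff):
step = scan_docker_image_high_critical_vulnerabilities_step("grafana/grafana:latest-ubuntu")
# step["commands"] == ["trivy image --exit-code 1 --severity HIGH,CRITICAL grafana/grafana:latest-ubuntu"]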

@ -3,10 +3,8 @@ This module returns all the pipelines used in the event of pushes to the main br
"""
load(
"scripts/drone/utils/utils.star",
"drone_change_template",
"failure_template",
"notify_pipeline",
"scripts/drone/pipelines/build.star",
"build_e2e",
)
load(
"scripts/drone/pipelines/docs.star",
@ -14,36 +12,37 @@ load(
"trigger_docs_main",
)
load(
"scripts/drone/pipelines/test_frontend.star",
"test_frontend",
"scripts/drone/pipelines/integration_tests.star",
"integration_tests",
)
load(
"scripts/drone/pipelines/test_backend.star",
"test_backend",
"scripts/drone/pipelines/lint_backend.star",
"lint_backend_pipeline",
)
load(
"scripts/drone/pipelines/integration_tests.star",
"integration_tests",
"scripts/drone/pipelines/lint_frontend.star",
"lint_frontend_pipeline",
)
load(
"scripts/drone/pipelines/build.star",
"build_e2e",
"scripts/drone/pipelines/test_backend.star",
"test_backend",
)
load(
"scripts/drone/pipelines/windows.star",
"windows",
"scripts/drone/pipelines/test_frontend.star",
"test_frontend",
)
load(
"scripts/drone/pipelines/trigger_downstream.star",
"enterprise_downstream_pipeline",
)
load(
"scripts/drone/pipelines/lint_backend.star",
"lint_backend_pipeline",
"scripts/drone/pipelines/windows.star",
"windows",
)
load(
"scripts/drone/pipelines/lint_frontend.star",
"lint_frontend_pipeline",
"scripts/drone/utils/utils.star",
"failure_template",
"notify_pipeline",
)
ver_mode = "main"
@ -59,27 +58,12 @@ trigger = {
"latest.json",
],
},
"repo": [
"grafana/grafana",
],
}
def main_pipelines():
drone_change_trigger = {
"event": [
"push",
],
"branch": "main",
"repo": [
"grafana/grafana",
],
"paths": {
"include": [
".drone.yml",
],
"exclude": [
"exclude",
],
},
}
pipelines = [
docs_pipelines(ver_mode, trigger_docs_main()),
test_frontend(trigger, ver_mode),
@ -89,13 +73,6 @@ def main_pipelines():
build_e2e(trigger, ver_mode),
integration_tests(trigger, prefix = ver_mode, ver_mode = ver_mode),
windows(trigger, ver_mode = ver_mode),
notify_pipeline(
name = "notify-drone-changes",
slack_channel = "slack-webhooks-test",
trigger = drone_change_trigger,
template = drone_change_template,
secret = "drone-changes-webhook",
),
enterprise_downstream_pipeline(),
notify_pipeline(
name = "main-notify",

@ -4,45 +4,49 @@ It also includes a function generating a PR trigger from a list of included and
"""
load(
"scripts/drone/pipelines/test_frontend.star",
"test_frontend",
"scripts/drone/pipelines/benchmarks.star",
"integration_benchmarks",
)
load(
"scripts/drone/pipelines/test_backend.star",
"test_backend",
"scripts/drone/pipelines/build.star",
"build_e2e",
)
load(
"scripts/drone/pipelines/docs.star",
"docs_pipelines",
"trigger_docs_pr",
)
load(
"scripts/drone/pipelines/integration_tests.star",
"integration_tests",
)
load(
"scripts/drone/pipelines/build.star",
"build_e2e",
"scripts/drone/pipelines/lint_backend.star",
"lint_backend_pipeline",
)
load(
"scripts/drone/pipelines/verify_drone.star",
"verify_drone",
"scripts/drone/pipelines/lint_frontend.star",
"lint_frontend_pipeline",
)
load(
"scripts/drone/pipelines/verify_starlark.star",
"verify_starlark",
"scripts/drone/pipelines/shellcheck.star",
"shellcheck_pipeline",
)
load(
"scripts/drone/pipelines/docs.star",
"docs_pipelines",
"trigger_docs_pr",
"scripts/drone/pipelines/test_backend.star",
"test_backend",
)
load(
"scripts/drone/pipelines/shellcheck.star",
"shellcheck_pipeline",
"scripts/drone/pipelines/test_frontend.star",
"test_frontend",
)
load(
"scripts/drone/pipelines/lint_backend.star",
"lint_backend_pipeline",
"scripts/drone/pipelines/verify_drone.star",
"verify_drone",
)
load(
"scripts/drone/pipelines/lint_frontend.star",
"lint_frontend_pipeline",
"scripts/drone/pipelines/verify_starlark.star",
"verify_starlark",
)
ver_mode = "pr"
@ -130,10 +134,12 @@ def pr_pipelines():
],
),
prefix = ver_mode,
ver_mode = ver_mode,
),
docs_pipelines(ver_mode, trigger_docs_pr()),
shellcheck_pipeline(),
integration_benchmarks(
prefix = ver_mode,
),
]
def get_pr_trigger(include_paths = None, exclude_paths = None):

@ -2,53 +2,36 @@
This module returns all the pipelines used in the event of a release along with supporting functions.
"""
load(
"scripts/drone/services/services.star",
"integration_test_services",
"integration_test_services_volumes",
)
load(
"scripts/drone/steps/lib.star",
"build_backend_step",
"build_docker_images_step",
"build_frontend_package_step",
"build_frontend_step",
"build_plugins_step",
"build_storybook_step",
"compile_build_cmd",
"copy_packages_for_docker_step",
"download_grabpl_step",
"e2e_tests_artifacts",
"e2e_tests_step",
"get_windows_steps",
"grafana_server_step",
"identify_runner_step",
"mysql_integration_tests_step",
"package_step",
"postgres_integration_tests_step",
"memcached_integration_tests_steps",
"mysql_integration_tests_steps",
"postgres_integration_tests_steps",
"publish_grafanacom_step",
"publish_linux_packages_step",
"store_storybook_step",
"trigger_oss",
"upload_cdn_step",
"upload_packages_step",
"redis_integration_tests_steps",
"remote_alertmanager_integration_tests_steps",
"verify_gen_cue_step",
"verify_gen_jsonnet_step",
"wire_install_step",
"yarn_install_step",
)
load(
"scripts/drone/services/services.star",
"integration_test_services",
"integration_test_services_volumes",
"scripts/drone/utils/images.star",
"images",
)
load(
"scripts/drone/utils/utils.star",
"pipeline",
)
load(
"scripts/drone/pipelines/test_frontend.star",
"test_frontend",
)
load(
"scripts/drone/pipelines/test_backend.star",
"test_backend",
)
load(
"scripts/drone/vault.star",
"from_secret",
@ -57,51 +40,13 @@ load(
"prerelease_bucket",
"rgm_gcp_key_base64",
)
load(
"scripts/drone/utils/images.star",
"images",
)
load(
"scripts/drone/pipelines/whats_new_checker.star",
"whats_new_checker_pipeline",
)
ver_mode = "release"
release_trigger = {
"event": {
"exclude": [
"promote",
],
},
"ref": {
"include": [
"refs/tags/v*",
],
"exclude": [
"refs/tags/*-cloud*",
],
},
}
def store_npm_packages_step():
return {
"name": "store-npm-packages",
"image": images["build_image"],
"depends_on": [
"compile-build-cmd",
"build-frontend-packages",
],
"environment": {
"GCP_KEY": from_secret(gcp_grafanauploads_base64),
"PRERELEASE_BUCKET": from_secret(prerelease_bucket),
},
"commands": ["./bin/build artifacts npm store --tag ${DRONE_TAG}"],
}
def retrieve_npm_packages_step():
return {
"name": "retrieve-npm-packages",
"image": images["publish_image"],
"image": images["publish"],
"depends_on": [
"compile-build-cmd",
"yarn-install",
@ -117,7 +62,7 @@ def retrieve_npm_packages_step():
def release_npm_packages_step():
return {
"name": "release-npm-packages",
"image": images["build_image"],
"image": images["go"],
"depends_on": [
"compile-build-cmd",
"retrieve-npm-packages",
@ -129,142 +74,10 @@ def release_npm_packages_step():
"commands": ["./bin/build artifacts npm release --tag ${DRONE_TAG}"],
}
def oss_pipelines(ver_mode = ver_mode, trigger = release_trigger):
"""Generates all pipelines used for Grafana OSS.
Args:
ver_mode: controls which steps are included in the pipeline.
Defaults to 'release'.
trigger: controls which events can trigger the pipeline execution.
Defaults to tag events for tags with a 'v' prefix.
Returns:
List of Drone pipelines.
"""
environment = {"EDITION": "oss"}
services = integration_test_services(edition = "oss")
volumes = integration_test_services_volumes()
init_steps = [
identify_runner_step(),
download_grabpl_step(),
verify_gen_cue_step(),
wire_install_step(),
yarn_install_step(),
compile_build_cmd(),
]
build_steps = [
build_backend_step(ver_mode = ver_mode),
build_frontend_step(ver_mode = ver_mode),
build_frontend_package_step(ver_mode = ver_mode),
build_plugins_step(ver_mode = ver_mode),
package_step(ver_mode = ver_mode),
copy_packages_for_docker_step(),
build_docker_images_step(publish = True),
build_docker_images_step(
publish = True,
ubuntu = True,
),
grafana_server_step(),
e2e_tests_step("dashboards-suite", tries = 3),
e2e_tests_step("smoke-tests-suite", tries = 3),
e2e_tests_step("panels-suite", tries = 3),
e2e_tests_step("various-suite", tries = 3),
e2e_tests_artifacts(),
build_storybook_step(ver_mode = ver_mode),
]
publish_steps = []
if ver_mode in (
"release",
"release-branch",
):
publish_steps.extend(
[
upload_cdn_step(ver_mode = ver_mode, trigger = trigger_oss),
upload_packages_step(
ver_mode = ver_mode,
trigger = trigger_oss,
),
],
)
if ver_mode in ("release",):
publish_steps.extend(
[
store_storybook_step(ver_mode = ver_mode),
store_npm_packages_step(),
],
)
integration_test_steps = [
postgres_integration_tests_step(),
mysql_integration_tests_step(),
]
pipelines = []
# We don't need to run integration tests at release time since they have
# been run multiple times before:
if ver_mode in ("release"):
pipelines.append(whats_new_checker_pipeline(release_trigger))
integration_test_steps = []
volumes = []
windows_pipeline_dependencies = [
"{}-build-e2e-publish".format(ver_mode),
"{}-test-frontend".format(ver_mode),
]
pipelines.extend([
pipeline(
name = "{}-build-e2e-publish".format(ver_mode),
trigger = trigger,
services = [],
steps = init_steps + build_steps + publish_steps,
environment = environment,
volumes = volumes,
),
test_frontend(trigger, ver_mode),
test_backend(trigger, ver_mode),
])
if ver_mode not in ("release"):
pipelines.append(pipeline(
name = "{}-integration-tests".format(ver_mode),
trigger = trigger,
services = services,
steps = [
download_grabpl_step(),
identify_runner_step(),
verify_gen_cue_step(),
verify_gen_jsonnet_step(),
wire_install_step(),
] +
integration_test_steps,
environment = environment,
volumes = volumes,
))
windows_pipeline = pipeline(
name = "{}-windows".format(ver_mode),
trigger = trigger,
steps = get_windows_steps(ver_mode = ver_mode),
platform = "windows",
depends_on = windows_pipeline_dependencies,
environment = environment,
)
pipelines.append(windows_pipeline)
return pipelines
def publish_artifacts_step():
return {
"name": "publish-artifacts",
"image": images["publish_image"],
"image": images["publish"],
"environment": {
"GCP_KEY": from_secret(gcp_grafanauploads_base64),
"PRERELEASE_BUCKET": from_secret("prerelease_bucket"),
@ -278,7 +91,7 @@ def publish_artifacts_step():
def publish_static_assets_step():
return {
"name": "publish-static-assets",
"image": images["publish_image"],
"image": images["publish"],
"environment": {
"GCP_KEY": from_secret(gcp_grafanauploads_base64),
"PRERELEASE_BUCKET": from_secret("prerelease_bucket"),
@ -293,7 +106,7 @@ def publish_static_assets_step():
def publish_storybook_step():
return {
"name": "publish-storybook",
"image": images["publish_image"],
"image": images["publish"],
"environment": {
"GCP_KEY": from_secret(gcp_grafanauploads_base64),
"PRERELEASE_BUCKET": from_secret("prerelease_bucket"),
@ -405,15 +218,17 @@ def integration_test_pipelines():
}
pipelines = []
volumes = integration_test_services_volumes()
oss_integration_test_steps = [
postgres_integration_tests_step(),
mysql_integration_tests_step(),
]
integration_test_steps = postgres_integration_tests_steps() + \
mysql_integration_tests_steps("mysql57", "5.7") + \
mysql_integration_tests_steps("mysql80", "8.0") + \
redis_integration_tests_steps() + \
memcached_integration_tests_steps() + \
remote_alertmanager_integration_tests_steps()
pipelines.append(pipeline(
name = "integration-tests",
trigger = trigger,
services = integration_test_services(edition = "oss"),
services = integration_test_services(),
steps = [
download_grabpl_step(),
identify_runner_step(),
@ -421,7 +236,7 @@ def integration_test_pipelines():
verify_gen_jsonnet_step(),
wire_install_step(),
] +
oss_integration_test_steps,
integration_test_steps,
environment = {"EDITION": "oss"},
volumes = volumes,
))
@ -433,7 +248,7 @@ def verify_release_pipeline(
bucket = from_secret(prerelease_bucket),
gcp_key = from_secret(rgm_gcp_key_base64),
version = "${DRONE_TAG}",
trigger = release_trigger,
trigger = {},
depends_on = [
"release-build-e2e-publish",
"release-windows",
@ -447,7 +262,7 @@ def verify_release_pipeline(
step = {
"name": "gsutil-stat",
"depends_on": ["clone"],
"image": images["cloudsdk_image"],
"image": images["cloudsdk"],
"environment": {
"BUCKET": bucket,
"GCP_KEY": gcp_key,

@ -0,0 +1,80 @@
"""
This module returns the pipeline used for integration benchmarks.
"""
load(
"scripts/drone/services/services.star",
"integration_test_services",
"integration_test_services_volumes",
)
load(
"scripts/drone/steps/lib.star",
"compile_build_cmd",
"enterprise_setup_step",
"integration_benchmarks_step",
"verify_gen_cue_step",
"verify_gen_jsonnet_step",
"wire_install_step",
)
load(
"scripts/drone/utils/utils.star",
"pipeline",
)
def integration_benchmarks(prefix):
"""Generate a pipeline for integration tests.
Args:
prefix: used in the naming of the pipeline.
Returns:
Drone pipeline.
"""
environment = {"EDITION": "oss"}
services = integration_test_services()
volumes = integration_test_services_volumes()
# Attempt to clone grafana enterprise (benchmark builds run via promotion, hence isPromote = True).
init_steps = [enterprise_setup_step(isPromote = True)]
verify_step = verify_gen_cue_step()
verify_jsonnet_step = verify_gen_jsonnet_step()
# Ensure that verify_gen_cue happens after we clone enterprise.
# At the time of writing, verify_gen_cue is depended on by the wire step, which everything else depends on.
verify_step["depends_on"].append("clone-enterprise")
verify_jsonnet_step["depends_on"].append("clone-enterprise")
init_steps += [
compile_build_cmd(),
verify_step,
verify_jsonnet_step,
wire_install_step(),
]
benchmark_steps = integration_benchmarks_step("sqlite") + \
integration_benchmarks_step("postgres", {
"PGPASSWORD": "grafanatest",
"GRAFANA_TEST_DB": "postgres",
"POSTGRES_HOST": "postgres",
}) + \
integration_benchmarks_step("mysql-5.7", {
"GRAFANA_TEST_DB": "mysql",
"MYSQL_HOST": "mysql57",
}) + \
integration_benchmarks_step("mysql-8.0", {
"GRAFANA_TEST_DB": "mysql",
"MYSQL_HOST": "mysql80",
})
return pipeline(
name = "{}-integration-benchmarks".format(prefix),
trigger = {
"event": ["promote"],
"target": ["gobenchmarks"],
},
environment = environment,
services = services,
volumes = volumes,
steps = init_steps + benchmark_steps,
)
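A hedged usage sketch (integration_benchmarks_step comes from lib.star, which isn't shown here, so treat the exact signature as assumed): adding another database to the benchmark matrix only needs a further env map keyed to a matching service:
benchmark_steps += integration_benchmarks_step("mysql-8.1", {
    "GRAFANA_TEST_DB": "mysql",
    "MYSQL_HOST": "mysql81",  # hypothetical service name; would also need an entry in services.star
})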

@ -2,15 +2,10 @@
load(
"scripts/drone/steps/lib.star",
"build_backend_step",
"build_docker_images_step",
"build_frontend_package_step",
"build_frontend_step",
"build_plugins_step",
"build_storybook_step",
"cloud_plugins_e2e_tests_step",
"compile_build_cmd",
"copy_packages_for_docker_step",
"download_grabpl_step",
"e2e_tests_artifacts",
"e2e_tests_step",
@ -18,12 +13,12 @@ load(
"frontend_metrics_step",
"grafana_server_step",
"identify_runner_step",
"package_step",
"publish_images_step",
"release_canary_npm_packages_step",
"store_storybook_step",
"test_a11y_frontend_step",
"trigger_oss",
"update_package_json_version",
"upload_cdn_step",
"upload_packages_step",
"verify_gen_cue_step",
@ -31,6 +26,15 @@ load(
"wire_install_step",
"yarn_install_step",
)
load(
"scripts/drone/steps/rgm.star",
"rgm_build_docker_step",
"rgm_package_step",
)
load(
"scripts/drone/utils/images.star",
"images",
)
load(
"scripts/drone/utils/utils.star",
"pipeline",
@ -38,11 +42,11 @@ load(
# @unused
def build_e2e(trigger, ver_mode):
"""Perform e2e building, testing, and publishing."
"""Perform e2e building, testing, and publishing.
Args:
trigger: controls which events can trigger the pipeline execution.
ver_mode: used in the naming of the pipeline.
ver_mode: used in the naming of the pipeline. Either 'pr' or 'main'.
Returns:
Drone pipeline.
@ -64,17 +68,19 @@ def build_e2e(trigger, ver_mode):
if ver_mode == "pr":
build_steps.extend(
[
build_frontend_package_step(),
enterprise_downstream_step(ver_mode = ver_mode),
],
)
else:
build_steps.extend([
update_package_json_version(),
build_frontend_package_step(depends_on = ["update-package-json-version"]),
])
build_steps.extend(
[
build_backend_step(ver_mode = ver_mode),
build_frontend_step(ver_mode = ver_mode),
build_frontend_package_step(ver_mode = ver_mode),
build_plugins_step(ver_mode = ver_mode),
package_step(ver_mode = ver_mode),
rgm_package_step(distros = "linux/amd64,linux/arm64", file = "packages.txt"),
grafana_server_step(),
e2e_tests_step("dashboards-suite"),
e2e_tests_step("smoke-tests-suite"),
@ -87,7 +93,6 @@ def build_e2e(trigger, ver_mode):
),
e2e_tests_artifacts(),
build_storybook_step(ver_mode = ver_mode),
copy_packages_for_docker_step(),
test_a11y_frontend_step(ver_mode = ver_mode),
],
)
@ -97,12 +102,12 @@ def build_e2e(trigger, ver_mode):
[
store_storybook_step(trigger = trigger_oss, ver_mode = ver_mode),
frontend_metrics_step(trigger = trigger_oss),
build_docker_images_step(
publish = False,
),
build_docker_images_step(
publish = False,
ubuntu = True,
rgm_build_docker_step(
"packages.txt",
images["ubuntu"],
images["alpine"],
tag_format = "{{ .version_base }}-{{ .buildID }}-{{ .arch }}",
ubuntu_tag_format = "{{ .version_base }}-{{ .buildID }}-ubuntu-{{ .arch }}",
),
publish_images_step(
docker_repo = "grafana",
@ -128,16 +133,12 @@ def build_e2e(trigger, ver_mode):
elif ver_mode == "pr":
build_steps.extend(
[
build_docker_images_step(
archs = [
"amd64",
],
),
build_docker_images_step(
archs = [
"amd64",
],
ubuntu = True,
rgm_build_docker_step(
"packages.txt",
images["ubuntu"],
images["alpine"],
tag_format = "{{ .version_base }}-{{ .buildID }}-{{ .arch }}",
ubuntu_tag_format = "{{ .version_base }}-{{ .buildID }}-ubuntu-{{ .arch }}",
),
publish_images_step(
docker_repo = "grafana",

@ -3,21 +3,21 @@ This module contains steps and pipelines relating to creating CI Docker images.
"""
load(
"scripts/drone/utils/utils.star",
"pipeline",
"scripts/drone/utils/images.star",
"images",
)
load(
"scripts/drone/vault.star",
"from_secret",
"gcp_download_build_container_assets_key",
"scripts/drone/utils/utils.star",
"pipeline",
)
load(
"scripts/drone/utils/windows_images.star",
"windows_images",
)
load(
"scripts/drone/utils/images.star",
"images",
"scripts/drone/vault.star",
"from_secret",
"gcp_download_build_container_assets_key",
)
def publish_ci_windows_test_image_pipeline():
@ -32,7 +32,7 @@ def publish_ci_windows_test_image_pipeline():
steps = [
{
"name": "clone",
"image": windows_images["wix_image"],
"image": windows_images["wix"],
"environment": {
"GITHUB_TOKEN": from_secret("github_token"),
},
@ -43,7 +43,7 @@ def publish_ci_windows_test_image_pipeline():
},
{
"name": "build-and-publish",
"image": windows_images["windows_server_core_image"],
"image": windows_images["windows_server_core"],
"environment": {
"DOCKER_USERNAME": from_secret("docker_username"),
"DOCKER_PASSWORD": from_secret("docker_password"),
@ -81,14 +81,14 @@ def publish_ci_build_container_image_pipeline():
steps = [
{
"name": "validate-version",
"image": images["alpine_image"],
"image": images["alpine"],
"commands": [
"if [ -z \"${BUILD_CONTAINER_VERSION}\" ]; then echo Missing BUILD_CONTAINER_VERSION; false; fi",
],
},
{
"name": "download-macos-sdk",
"image": images["cloudsdk_image"],
"image": images["cloudsdk"],
"environment": {
"GCP_KEY": from_secret(gcp_download_build_container_assets_key),
},
@ -100,7 +100,7 @@ def publish_ci_build_container_image_pipeline():
},
{
"name": "build-and-publish", # Consider splitting the build and the upload task.
"image": images["cloudsdk_image"],
"image": images["cloudsdk"],
"volumes": [{"name": "docker", "path": "/var/run/docker.sock"}],
"environment": {
"DOCKER_USERNAME": from_secret("docker_username"),

@ -7,6 +7,7 @@ load(
"build_docs_website_step",
"codespell_step",
"identify_runner_step",
"verify_gen_cue_step",
"yarn_install_step",
)
load(
@ -35,6 +36,7 @@ def docs_pipelines(ver_mode, trigger):
codespell_step(),
lint_docs(),
build_docs_website_step(),
verify_gen_cue_step(),
]
return pipeline(
@ -48,7 +50,7 @@ def docs_pipelines(ver_mode, trigger):
def lint_docs():
return {
"name": "lint-docs",
"image": images["build_image"],
"image": images["node"],
"depends_on": [
"yarn-install",
],
@ -66,6 +68,9 @@ def trigger_docs_main():
"event": [
"push",
],
"repo": [
"grafana/grafana",
],
"paths": docs_paths,
}
@ -74,5 +79,8 @@ def trigger_docs_pr():
"event": [
"pull_request",
],
"repo": [
"grafana/grafana",
],
"paths": docs_paths,
}

@ -2,29 +2,32 @@
This module returns the pipeline used for integration tests.
"""
load(
"scripts/drone/services/services.star",
"integration_test_services",
"integration_test_services_volumes",
)
load(
"scripts/drone/steps/lib.star",
"compile_build_cmd",
"download_grabpl_step",
"enterprise_setup_step",
"identify_runner_step",
"mysql_integration_tests_step",
"postgres_integration_tests_step",
"memcached_integration_tests_steps",
"mysql_integration_tests_steps",
"postgres_integration_tests_steps",
"redis_integration_tests_steps",
"remote_alertmanager_integration_tests_steps",
"verify_gen_cue_step",
"verify_gen_jsonnet_step",
"wire_install_step",
)
load(
"scripts/drone/services/services.star",
"integration_test_services",
"integration_test_services_volumes",
)
load(
"scripts/drone/utils/utils.star",
"pipeline",
)
def integration_tests(trigger, prefix, ver_mode):
def integration_tests(trigger, prefix, ver_mode = "pr"):
"""Generate a pipeline for integration tests.
Args:
@ -37,11 +40,14 @@ def integration_tests(trigger, prefix, ver_mode):
"""
environment = {"EDITION": "oss"}
services = integration_test_services(edition = "oss")
services = integration_test_services()
volumes = integration_test_services_volumes()
init_steps = []
verify_step = verify_gen_cue_step()
verify_jsonnet_step = verify_gen_jsonnet_step()
if ver_mode == "pr":
# In pull requests, attempt to clone grafana enterprise.
init_steps.append(enterprise_setup_step())
@ -50,15 +56,18 @@ def integration_tests(trigger, prefix, ver_mode):
download_grabpl_step(),
compile_build_cmd(),
identify_runner_step(),
verify_gen_cue_step(),
verify_gen_jsonnet_step(),
verify_step,
verify_jsonnet_step,
wire_install_step(),
]
test_steps = [
postgres_integration_tests_step(),
mysql_integration_tests_step(),
]
# test_steps = [a, b] + [c, d] + [e, f]...
test_steps = postgres_integration_tests_steps() + \
mysql_integration_tests_steps("mysql57", "5.7") + \
mysql_integration_tests_steps("mysql80", "8.0") + \
redis_integration_tests_steps() + \
memcached_integration_tests_steps() + \
remote_alertmanager_integration_tests_steps()
return pipeline(
name = "{}-integration-tests".format(prefix),

@ -44,6 +44,10 @@ def lint_backend_pipeline(trigger, ver_mode):
test_steps = [
lint_backend_step(),
# modowners doesn't exist for versions below 10.1.x.
# validate_modfile_step(),
# OpenAPI spec validation is disabled for versions below 10.2.x.
# validate_openapi_spec_step(),
]
if ver_mode == "main":

@ -27,6 +27,9 @@ def lint_frontend_pipeline(trigger, ver_mode):
environment = {"EDITION": "oss"}
init_steps = []
lint_step = lint_frontend_step()
# i18n step is disabled for versions below 10.1.x.
# i18n_step = verify_i18n_step()
if ver_mode == "pr":
# In pull requests, attempt to clone grafana enterprise.
@ -36,9 +39,10 @@ def lint_frontend_pipeline(trigger, ver_mode):
identify_runner_step(),
yarn_install_step(),
]
test_steps = [
lint_frontend_step(),
lint_step,
# i18n step is disabled for versions below 10.1.x.
# i18n_step,
]
return pipeline(

@ -3,14 +3,14 @@ This module returns a Drone step and pipeline for linting with shellcheck.
"""
load("scripts/drone/steps/lib.star", "compile_build_cmd")
load(
"scripts/drone/utils/utils.star",
"pipeline",
)
load(
"scripts/drone/utils/images.star",
"images",
)
load(
"scripts/drone/utils/utils.star",
"pipeline",
)
trigger = {
"event": [
@ -29,12 +29,10 @@ trigger = {
def shellcheck_step():
return {
"name": "shellcheck",
"image": images["build_image"],
"depends_on": [
"compile-build-cmd",
],
"image": images["ubuntu"],
"commands": [
"./bin/build shellcheck",
"apt-get update -yq && apt-get install shellcheck",
"shellcheck -e SC1071 -e SC2162 scripts/**/*.sh",
],
}

@ -2,13 +2,8 @@
This module returns the pipeline used for testing backend code.
"""
load(
"scripts/drone/utils/utils.star",
"pipeline",
)
load(
"scripts/drone/steps/lib.star",
"compile_build_cmd",
"enterprise_setup_step",
"identify_runner_step",
"test_backend_integration_step",
@ -17,6 +12,10 @@ load(
"verify_gen_jsonnet_step",
"wire_install_step",
)
load(
"scripts/drone/utils/utils.star",
"pipeline",
)
def test_backend(trigger, ver_mode):
"""Generates the pipeline used for testing OSS backend code.
@ -32,15 +31,17 @@ def test_backend(trigger, ver_mode):
steps = []
verify_step = verify_gen_cue_step()
verify_jsonnet_step = verify_gen_jsonnet_step()
if ver_mode == "pr":
# In pull requests, attempt to clone grafana enterprise.
steps.append(enterprise_setup_step())
steps += [
identify_runner_step(),
compile_build_cmd(),
verify_gen_cue_step(),
verify_gen_jsonnet_step(),
verify_step,
verify_jsonnet_step,
wire_install_step(),
test_backend_step(),
test_backend_integration_step(),

@ -2,19 +2,18 @@
This module returns the pipeline used for testing frontend code.
"""
load(
"scripts/drone/utils/utils.star",
"pipeline",
)
load(
"scripts/drone/steps/lib.star",
"betterer_frontend_step",
"download_grabpl_step",
"enterprise_setup_step",
"identify_runner_step",
"test_frontend_step",
"yarn_install_step",
)
load(
"scripts/drone/utils/utils.star",
"pipeline",
)
def test_frontend(trigger, ver_mode):
"""Generates the pipeline used for testing frontend code.
@ -30,7 +29,6 @@ def test_frontend(trigger, ver_mode):
steps = [
identify_runner_step(),
download_grabpl_step(),
yarn_install_step(),
betterer_frontend_step(),
]

@ -2,14 +2,14 @@
This module contains logic for checking if the package.json what's new URL matches the in-flight tag.
"""
load(
"scripts/drone/utils/images.star",
"images",
)
load(
"scripts/drone/steps/lib.star",
"compile_build_cmd",
)
load(
"scripts/drone/utils/images.star",
"images",
)
load(
"scripts/drone/utils/utils.star",
"pipeline",
@ -18,7 +18,7 @@ load(
def whats_new_checker_step():
return {
"name": "whats-new-checker",
"image": images["go_image"],
"image": images["go"],
"depends_on": [
"compile-build-cmd",
],

@ -3,15 +3,15 @@ This module returns the pipeline used for building Grafana on Windows.
"""
load(
"scripts/drone/utils/utils.star",
"pipeline",
"scripts/drone/steps/lib_windows.star",
"clone_step_windows",
"get_windows_steps",
"test_backend_step_windows",
"wire_install_step_windows",
)
load(
"scripts/drone/steps/lib.star",
"get_windows_steps",
"windows_clone_step",
"windows_test_backend_step",
"windows_wire_install_step",
"scripts/drone/utils/utils.star",
"pipeline",
)
load(
"scripts/drone/utils/windows_images.star",
@ -30,19 +30,19 @@ def windows_test_backend(trigger, edition, ver_mode):
"""
environment = {"EDITION": edition}
steps = [
windows_clone_step(),
clone_step_windows(),
]
steps.extend([{
"name": "windows-init",
"image": windows_images["windows_go_image"],
"image": windows_images["go"],
"depends_on": ["clone"],
"commands": [],
}])
steps.extend([
windows_wire_install_step(edition),
windows_test_backend_step(),
wire_install_step_windows(edition),
test_backend_step_windows(),
])
pl = pipeline(
name = "{}-test-backend-windows".format(ver_mode),

@ -4,50 +4,52 @@ rgm uses 'github.com/grafana/grafana-build' to build Grafana on the following ev
* A tag that begins with a 'v'
"""
load(
"scripts/drone/steps/lib.star",
"get_windows_steps",
)
load(
"scripts/drone/utils/utils.star",
"ignore_failure",
"pipeline",
)
load(
"scripts/drone/events/release.star",
"verify_release_pipeline",
)
load(
"scripts/drone/pipelines/test_frontend.star",
"test_frontend",
)
load(
"scripts/drone/pipelines/test_backend.star",
"test_backend",
)
load(
"scripts/drone/pipelines/test_frontend.star",
"test_frontend",
)
load(
"scripts/drone/pipelines/whats_new_checker.star",
"whats_new_checker_pipeline",
)
load(
"scripts/drone/steps/lib_windows.star",
"get_windows_steps",
)
load(
"scripts/drone/utils/images.star",
"images",
)
load(
"scripts/drone/utils/utils.star",
"pipeline",
"with_deps",
)
load(
"scripts/drone/variables.star",
"golang_version",
)
load(
"scripts/drone/vault.star",
"from_secret",
"npm_token",
"rgm_cdn_destination",
"rgm_dagger_token",
"rgm_destination",
"rgm_downloads_destination",
"rgm_gcp_key_base64",
"rgm_github_token",
"rgm_storybook_destination",
)
rgm_env_secrets = {
"GCP_KEY_BASE64": from_secret(rgm_gcp_key_base64),
"DESTINATION": from_secret(rgm_destination),
"GITHUB_TOKEN": from_secret(rgm_github_token),
"_EXPERIMENTAL_DAGGER_CLOUD_TOKEN": from_secret(rgm_dagger_token),
"GPG_PRIVATE_KEY": from_secret("packages_gpg_private_key"),
"GPG_PUBLIC_KEY": from_secret("packages_gpg_public_key"),
"GPG_PASSPHRASE": from_secret("packages_gpg_passphrase"),
}
docs_paths = {
"exclude": [
"*.md",
@ -73,75 +75,233 @@ tag_trigger = {
},
}
def rgm_build(script = "drone_publish_main.sh", canFail = True):
rgm_build_step = {
"name": "rgm-build",
main_trigger = {
"event": [
"push",
],
"branch": "main",
"paths": docs_paths,
"repo": [
"grafana/grafana",
],
}
nightly_trigger = {
"event": {
"include": [
"cron",
],
},
"cron": {
"include": [
"nightly-release",
],
},
}
version_branch_trigger = {"ref": ["refs/heads/v[0-9]*"]}
def rgm_env_secrets(env):
"""Adds the rgm secret ENV variables to the given env arg
Args:
env: A map of environment varables. This function will adds the necessary secrets to it (and potentially overwrite them).
Returns:
Drone step.
"""
env["DESTINATION"] = from_secret(rgm_destination)
env["STORYBOOK_DESTINATION"] = from_secret(rgm_storybook_destination)
env["CDN_DESTINATION"] = from_secret(rgm_cdn_destination)
env["DOWNLOADS_DESTINATION"] = from_secret(rgm_downloads_destination)
env["GCP_KEY_BASE64"] = from_secret(rgm_gcp_key_base64)
env["GITHUB_TOKEN"] = from_secret(rgm_github_token)
env["_EXPERIMENTAL_DAGGER_CLOUD_TOKEN"] = from_secret(rgm_dagger_token)
env["GPG_PRIVATE_KEY"] = from_secret("packages_gpg_private_key")
env["GPG_PUBLIC_KEY"] = from_secret("packages_gpg_public_key")
env["GPG_PASSPHRASE"] = from_secret("packages_gpg_passphrase")
env["DOCKER_USERNAME"] = from_secret("docker_username")
env["DOCKER_PASSWORD"] = from_secret("docker_password")
env["NPM_TOKEN"] = from_secret(npm_token)
env["GCOM_API_KEY"] = from_secret("grafana_api_key")
return env
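A short usage sketch of the helper above (golang_version comes from variables.star, added later in this change):
env = rgm_env_secrets({"GO_VERSION": golang_version})
# env now also carries DESTINATION, STORYBOOK_DESTINATION, CDN_DESTINATION, GCP_KEY_BASE64,
# GITHUB_TOKEN, NPM_TOKEN, etc., each populated via from_secret so Drone resolves them at runtime.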
def rgm_run(name, script):
"""Returns a pipeline that does a full build & package of Grafana.
Args:
name: The name of the pipeline step.
script: The script in the container to run.
Returns:
A list containing the Drone step.
"""
env = {
"GO_VERSION": golang_version,
}
rgm_run_step = {
"name": name,
"image": "grafana/grafana-build:main",
"pull": "always",
"commands": [
"export GRAFANA_DIR=$$(pwd)",
"cd /src && ./scripts/{}".format(script),
],
"environment": rgm_env_secrets,
"environment": rgm_env_secrets(env),
# The docker socket is a requirement for running dagger programs
# In the future we should find a way to use dagger without mounting the docker socket.
"volumes": [{"name": "docker", "path": "/var/run/docker.sock"}],
}
if canFail:
rgm_build_step["failure"] = "ignore"
return [
rgm_build_step,
rgm_run_step,
]
def rgm_main():
trigger = {
"event": [
"push",
],
"branch": "main",
"paths": docs_paths,
"repo": [
"grafana/grafana",
],
def rgm_copy(src, dst):
"""Copies file from/to GCS.
Args:
src: source of the files.
dst: destination of the files.
Returns:
Drone steps.
"""
commands = [
"printenv GCP_KEY_BASE64 | base64 -d > /tmp/key.json",
"gcloud auth activate-service-account --key-file=/tmp/key.json",
"gcloud storage cp -r {} {}".format(src, dst),
]
if not dst.startswith("gs://"):
commands.insert(0, "mkdir -p {}".format(dst))
rgm_copy_step = {
"name": "rgm-copy",
"image": "google/cloud-sdk:alpine",
"commands": commands,
"environment": rgm_env_secrets({}),
}
return [
rgm_copy_step,
]
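A brief illustration of the destination check above (paths are hypothetical): copying out of GCS prepends a mkdir, while copying into GCS does not:
rgm_copy("gs://grafana-prerelease/artifacts/*", "/tmp/dist")
# commands: ["mkdir -p /tmp/dist", "printenv GCP_KEY_BASE64 | base64 -d > /tmp/key.json", ...]
rgm_copy("/drone/src/dist/*", "gs://grafana-prerelease/artifacts/")
# commands start directly with the key decode / gcloud auth steps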
def rgm_publish_packages(bucket = "grafana-packages"):
"""Publish deb and rpm packages.
Args:
bucket: target bucket to publish the packages.
Returns:
Drone steps.
"""
steps = []
for package_manager in ["deb", "rpm"]:
steps.append({
"name": "publish-{}".format(package_manager),
# See https://github.com/grafana/deployment_tools/blob/master/docker/package-publish/README.md for docs on that image
"image": images["package_publish"],
"privileged": True,
"settings": {
"access_key_id": from_secret("packages_access_key_id"),
"secret_access_key": from_secret("packages_secret_access_key"),
"service_account_json": from_secret("packages_service_account"),
"target_bucket": bucket,
"gpg_passphrase": from_secret("packages_gpg_passphrase"),
"gpg_public_key": from_secret("packages_gpg_public_key"),
"gpg_private_key": from_secret("packages_gpg_private_key"),
"package_path": "file:///drone/src/dist/*.{}".format(package_manager),
},
})
return steps
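As a quick check of the loop above, calling it with the default bucket yields two steps whose names and package globs differ only by package manager:
steps = rgm_publish_packages()  # default bucket "grafana-packages"
# [s["name"] for s in steps] == ["publish-deb", "publish-rpm"]
# package_path: file:///drone/src/dist/*.deb and file:///drone/src/dist/*.rpm respectively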
def rgm_main():
# Runs a package / build process (with some distros) when commits are merged to main
return pipeline(
name = "rgm-main-prerelease",
trigger = trigger,
steps = rgm_build(canFail = True),
trigger = main_trigger,
steps = rgm_run("rgm-build", "drone_publish_main.sh"),
depends_on = ["main-test-backend", "main-test-frontend"],
)
def rgm_tag():
# Runs a package / build process (with all distros) when a tag is made
return pipeline(
name = "rgm-tag-prerelease",
trigger = tag_trigger,
steps = rgm_build(script = "drone_publish_tag_grafana.sh", canFail = False),
steps = rgm_run("rgm-build", "drone_publish_tag_grafana.sh"),
depends_on = ["release-test-backend", "release-test-frontend"],
)
def rgm_windows():
def rgm_tag_windows():
return pipeline(
name = "rgm-tag-prerelease-windows",
trigger = tag_trigger,
steps = ignore_failure(
get_windows_steps(
ver_mode = "release",
bucket = "grafana-prerelease",
),
steps = get_windows_steps(
ver_mode = "release",
bucket = "grafana-prerelease",
),
depends_on = ["rgm-tag-prerelease"],
platform = "windows",
)
def rgm():
def rgm_version_branch():
# Runs a package / build process (with all distros) when a commit lands on a version branch
return pipeline(
name = "rgm-version-branch-prerelease",
trigger = version_branch_trigger,
steps = rgm_run("rgm-build", "drone_publish_tag_grafana.sh"),
depends_on = ["release-test-backend", "release-test-frontend"],
)
def rgm_nightly_build():
src = "$${DRONE_WORKSPACE}/dist/*"
dst = "$${DESTINATION}/$${DRONE_BUILD_EVENT}"
copy_steps = with_deps(rgm_copy(src, dst), ["rgm-build"])
return pipeline(
name = "rgm-nightly-build",
trigger = nightly_trigger,
steps = rgm_run("rgm-build", "drone_build_nightly_grafana.sh") + copy_steps,
depends_on = ["nightly-test-backend", "nightly-test-frontend"],
)
def rgm_nightly_publish():
"""Nightly publish pipeline.
Returns:
Drone pipeline.
"""
src = "$${DESTINATION}/$${DRONE_BUILD_EVENT}/*_$${DRONE_BUILD_NUMBER}_*"
dst = "$${DRONE_WORKSPACE}/dist"
publish_steps = with_deps(rgm_run("rgm-publish", "drone_publish_nightly_grafana.sh"), ["rgm-copy"])
package_steps = with_deps(rgm_publish_packages(), ["rgm-publish"])
return pipeline(
name = "rgm-nightly-publish",
trigger = nightly_trigger,
steps = rgm_copy(src, dst) + publish_steps + package_steps,
depends_on = ["rgm-nightly-build"],
)
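For orientation, Drone expands the escaped variables above at runtime, so the publish pipeline pulls the nightly artifacts back into the workspace. With illustrative values for the secret-backed DESTINATION and the build metadata (assumptions, not taken from this diff):
# src: gs://<destination-bucket>/cron/*_12345_*
# dst: /drone/src/dist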
def rgm_nightly_pipeline():
return [
test_frontend(nightly_trigger, "nightly"),
test_backend(nightly_trigger, "nightly"),
rgm_nightly_build(),
rgm_nightly_publish(),
]
def rgm_tag_pipeline():
return [
whats_new_checker_pipeline(tag_trigger),
test_frontend(tag_trigger, "release"),
test_backend(tag_trigger, "release"),
rgm_main(),
rgm_tag(),
rgm_windows(),
rgm_tag_windows(),
verify_release_pipeline(
trigger = tag_trigger,
name = "rgm-tag-verify-prerelease-assets",
@ -152,3 +312,29 @@ def rgm():
],
),
]
def rgm_version_branch_pipeline():
return [
rgm_version_branch(),
verify_release_pipeline(
trigger = version_branch_trigger,
name = "rgm-prerelease-verify-prerelease-assets",
bucket = "grafana-prerelease",
depends_on = [
"rgm-version-branch-prerelease",
],
),
]
def rgm_main_pipeline():
return [
rgm_main(),
]
def rgm():
return (
rgm_main_pipeline() +
rgm_tag_pipeline() +
rgm_version_branch_pipeline() +
rgm_nightly_pipeline()
)
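Read together with the depends_on values above, the pipelines chain as follows (a summary of this module, not new configuration):
# nightly: nightly-test-backend, nightly-test-frontend -> rgm-nightly-build -> rgm-nightly-publish
# tags:    release-test-backend, release-test-frontend -> rgm-tag-prerelease -> rgm-tag-prerelease-windows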

@ -10,14 +10,15 @@ load(
def integration_test_services_volumes():
return [
{"name": "postgres", "temp": {"medium": "memory"}},
{"name": "mysql", "temp": {"medium": "memory"}},
{"name": "mysql57", "temp": {"medium": "memory"}},
{"name": "mysql80", "temp": {"medium": "memory"}},
]
def integration_test_services(edition):
def integration_test_services():
services = [
{
"name": "postgres",
"image": images["postgres_alpine_image"],
"image": images["postgres_alpine"],
"environment": {
"POSTGRES_USER": "grafanatest",
"POSTGRES_PASSWORD": "grafanatest",
@ -29,40 +30,53 @@ def integration_test_services(edition):
],
},
{
"name": "mysql",
"image": images["mysql5_image"],
"name": "mysql57",
"image": images["mysql5"],
"environment": {
"MYSQL_ROOT_PASSWORD": "rootpass",
"MYSQL_DATABASE": "grafana_tests",
"MYSQL_USER": "grafana",
"MYSQL_PASSWORD": "password",
},
"volumes": [{"name": "mysql", "path": "/var/lib/mysql"}],
"volumes": [{"name": "mysql57", "path": "/var/lib/mysql"}],
"commands": ["docker-entrypoint.sh mysqld --character-set-server=utf8mb4 --collation-server=utf8mb4_unicode_ci"],
},
{
"name": "mysql80",
"image": images["mysql8"],
"environment": {
"MYSQL_ROOT_PASSWORD": "rootpass",
"MYSQL_DATABASE": "grafana_tests",
"MYSQL_USER": "grafana",
"MYSQL_PASSWORD": "password",
},
"volumes": [{"name": "mysql80", "path": "/var/lib/mysql"}],
"commands": ["docker-entrypoint.sh mysqld --default-authentication-plugin=mysql_native_password"],
},
{
"name": "mimir_backend",
"image": images["mimir"],
"environment": {},
"commands": ["/bin/mimir -target=backend"],
},
{
"name": "redis",
"image": images["redis_alpine"],
"environment": {},
},
{
"name": "memcached",
"image": images["memcached_alpine"],
"environment": {},
},
]
if edition in ("enterprise", "enterprise2"):
services.extend(
[
{
"name": "redis",
"image": "redis:6.2.1-alpine",
"environment": {},
},
{
"name": "memcached",
"image": "memcached:1.6.9-alpine",
"environment": {},
},
],
)
return services
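The two MySQL services above are distinguished only by hostname and startup flags, so a test step picks one via the environment it passes, mirroring the benchmark steps added earlier in this change:
# select MySQL 5.7
env = {"GRAFANA_TEST_DB": "mysql", "MYSQL_HOST": "mysql57"}
# select MySQL 8.0
env = {"GRAFANA_TEST_DB": "mysql", "MYSQL_HOST": "mysql80"}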
def ldap_service():
return {
"name": "ldap",
"image": images["openldap_image"],
"image": images["openldap"],
"environment": {
"LDAP_ADMIN_PASSWORD": "grafana",
"LDAP_DOMAIN": "grafana.org",

File diff suppressed because it is too large

@ -0,0 +1,187 @@
"""
This module is a library of Drone steps that exclusively run on windows machines.
"""
load(
"scripts/drone/utils/windows_images.star",
"windows_images",
)
load(
"scripts/drone/variables.star",
"grabpl_version",
)
load(
"scripts/drone/vault.star",
"from_secret",
"gcp_grafanauploads_base64",
"prerelease_bucket",
)
def identify_runner_step_windows():
return {
"name": "identify-runner",
"image": windows_images["1809"],
"commands": [
"echo $env:DRONE_RUNNER_NAME",
],
}
def get_windows_steps(ver_mode, bucket = "%PRERELEASE_BUCKET%"):
"""Generate the list of Windows steps.
Args:
ver_mode: used to differentiate steps for different version modes.
bucket: used to override prerelease bucket.
Returns:
List of Drone steps.
"""
steps = [
identify_runner_step_windows(),
]
init_cmds = [
'$$ProgressPreference = "SilentlyContinue"',
"Invoke-WebRequest https://grafana-downloads.storage.googleapis.com/grafana-build-pipeline/{}/windows/grabpl.exe -OutFile grabpl.exe".format(
grabpl_version,
),
]
steps.extend(
[
{
"name": "windows-init",
"image": windows_images["wix"],
"commands": init_cmds,
},
],
)
if ver_mode in (
"release",
"release-branch",
):
gcp_bucket = "{}/artifacts/downloads".format(bucket)
if ver_mode == "release":
ver_part = "${DRONE_TAG}"
dir = "release"
else:
dir = "main"
gcp_bucket = "grafana-downloads"
build_no = "DRONE_BUILD_NUMBER"
ver_part = "--build-id $$env:{}".format(build_no)
installer_commands = [
"$$gcpKey = $$env:GCP_KEY",
"[System.Text.Encoding]::UTF8.GetString([System.Convert]::FromBase64String($$gcpKey)) > gcpkey.json",
# gcloud fails to read the file unless converted with dos2unix
"dos2unix gcpkey.json",
"gcloud auth activate-service-account --key-file=gcpkey.json",
"rm gcpkey.json",
"cp C:\\App\\nssm-2.24.zip .",
]
if ver_mode in ("release",):
version = "${DRONE_TAG:1}"
installer_commands.extend(
[
".\\grabpl.exe windows-installer --target {} --edition oss {}".format(
"gs://{}/{}/oss/{}/grafana-{}.windows-amd64.zip".format(gcp_bucket, ver_part, ver_mode, version),
ver_part,
),
'$$fname = ((Get-Childitem grafana*.msi -name) -split "`n")[0]',
],
)
if ver_mode == "main":
installer_commands.extend(
[
"gsutil cp $$fname gs://{}/oss/{}/".format(gcp_bucket, dir),
'gsutil cp "$$fname.sha256" gs://{}/oss/{}/'.format(
gcp_bucket,
dir,
),
],
)
else:
installer_commands.extend(
[
"gsutil cp $$fname gs://{}/{}/oss/{}/".format(
gcp_bucket,
ver_part,
dir,
),
'gsutil cp "$$fname.sha256" gs://{}/{}/oss/{}/'.format(
gcp_bucket,
ver_part,
dir,
),
],
)
steps.append(
{
"name": "build-windows-installer",
"image": windows_images["wix"],
"depends_on": [
"windows-init",
],
"environment": {
"GCP_KEY": from_secret(gcp_grafanauploads_base64),
"PRERELEASE_BUCKET": from_secret(prerelease_bucket),
"GITHUB_TOKEN": from_secret("github_token"),
},
"commands": installer_commands,
},
)
return steps
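Traced through the branches above as called from rgm_tag_windows earlier in this change (ver_mode = "release", bucket = "grafana-prerelease"), the installer upload resolves to roughly this destination; the tag itself is substituted by Drone:
# gsutil cp grafana-*.msi gs://grafana-prerelease/artifacts/downloads/${DRONE_TAG}/oss/release/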
def download_grabpl_step_windows():
return {
"name": "grabpl",
"image": windows_images["wix"],
"commands": [
'$$ProgressPreference = "SilentlyContinue"',
"Invoke-WebRequest https://grafana-downloads.storage.googleapis.com/grafana-build-pipeline/{}/windows/grabpl.exe -OutFile grabpl.exe".format(
grabpl_version,
),
],
}
def test_backend_step_windows():
# TODO: This is mostly a duplicate of "test_backend_step" in lib.star; but this file can't import that one,
# otherwise it creates an import cycle.
return {
"name": "test-backend",
"image": windows_images["go"],
"depends_on": [
"wire-install",
],
"commands": [
"go test -tags requires_buildifer -short -covermode=atomic -timeout=5m ./pkg/...",
],
}
def clone_step_windows():
return {
"name": "clone",
"image": windows_images["wix"],
"environment": {
"GITHUB_TOKEN": from_secret("github_token"),
},
"commands": [
'git clone "https://$$env:GITHUB_TOKEN@github.com/$$env:DRONE_REPO.git" .',
"git checkout -f $$env:DRONE_COMMIT",
],
}
def wire_install_step_windows(edition):
return {
"name": "wire-install",
"image": windows_images["go"],
"commands": [
"go install github.com/google/wire/cmd/wire@v0.5.0",
"wire gen -tags {} ./pkg/server".format(edition),
],
"depends_on": [
"windows-init",
],
}

@ -0,0 +1,61 @@
"""
Individual steps that use 'grafana-build' to replace existing individual steps.
These aren't used in releases.
"""
load(
"scripts/drone/variables.star",
"golang_version",
)
# rgm_package_step creates a tar.gz for use in e2e tests or other PR-testing-related activities.
def rgm_package_step(distros = "linux/amd64,linux/arm64", file = "packages.txt"):
return {
"name": "rgm-package",
"image": "grafana/grafana-build:main",
"pull": "always",
"depends_on": ["yarn-install"],
"commands": [
"/src/grafana-build package --distro={} ".format(distros) +
"--go-version={} ".format(golang_version) +
"--yarn-cache=$$YARN_CACHE_FOLDER " +
"--build-id=$$DRONE_BUILD_NUMBER " +
"--grafana-dir=$$PWD > {}".format(file),
],
"volumes": [{"name": "docker", "path": "/var/run/docker.sock"}],
}
# rgm_build_backend_step compiles the grafana backend for various platforms. Prefer
# 'rgm_package_step' if you are creating a "usable" artifact. This should really only be used to verify that the code
# compiles.
def rgm_build_backend_step(distros = "linux/amd64,linux/arm64"):
return {
"name": "rgm-package",
"image": "grafana/grafana-build:main",
"pull": "always",
"commands": [
"/src/grafana-build build " +
"--go-version={} ".format(golang_version) +
"--distro={} --grafana-dir=$$PWD".format(distros),
],
"volumes": [{"name": "docker", "path": "/var/run/docker.sock"}],
}
def rgm_build_docker_step(packages, ubuntu, alpine, depends_on = ["rgm-package"], file = "docker.txt", tag_format = "{{ .version }}-{{ .arch }}", ubuntu_tag_format = "{{ .version }}-ubuntu-{{ .arch }}"):
return {
"name": "rgm-build-docker",
"image": "grafana/grafana-build:main",
"pull": "always",
"commands": [
"docker run --privileged --rm tonistiigi/binfmt --install all",
"/src/grafana-build docker " +
"$(cat {} | grep tar.gz | grep -v docker | grep -v sha256 | awk '{{print \"--package=\" $0}}') ".format(packages) +
"--ubuntu-base={} ".format(ubuntu) +
"--alpine-base={} ".format(alpine) +
"--tag-format='{}' ".format(tag_format) +
"--ubuntu-tag-format='{}' > {}".format(ubuntu_tag_format, file),
"find ./dist -name '*docker*.tar.gz' -type f | xargs -n1 docker load -i",
],
"volumes": [{"name": "docker", "path": "/var/run/docker.sock"}],
"depends_on": depends_on,
}
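The shell fragment above turns the rgm-package output into --package flags; a hypothetical Starlark rendering of the same filter (filenames are made up for illustration):
packages = [
    "dist/grafana_10.1.0_linux_amd64.tar.gz",         # kept
    "dist/grafana_10.1.0_linux_amd64.tar.gz.sha256",  # dropped: sha256
    "dist/grafana_10.1.0_linux_amd64.docker.tar.gz",  # dropped: docker
]
flags = ["--package=" + p for p in packages if "tar.gz" in p and "docker" not in p and "sha256" not in p]
# flags == ["--package=dist/grafana_10.1.0_linux_amd64.tar.gz"]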

@ -2,24 +2,35 @@
This module contains all the docker images that are used to build test and publish Grafana.
"""
load(
"scripts/drone/variables.star",
"golang_version",
"nodejs_version",
)
images = {
"cloudsdk_image": "google/cloud-sdk:431.0.0",
"build_image": "grafana/build-container:1.7.4",
"publish_image": "grafana/grafana-ci-deploy:1.3.3",
"alpine_image": "alpine:3.18.3",
"curl_image": "byrnedo/alpine-curl:0.1.8",
"go_image": "golang:1.20.10",
"plugins_slack_image": "plugins/slack",
"postgres_alpine_image": "postgres:12.3-alpine",
"mysql5_image": "mysql:5.7.39",
"mysql8_image": "mysql:8.0.32",
"redis_alpine_image": "redis:6.2.11-alpine",
"memcached_alpine_image": "memcached:1.6.9-alpine",
"package_publish_image": "us.gcr.io/kubernetes-dev/package-publish:latest",
"openldap_image": "osixia/openldap:1.4.0",
"drone_downstream_image": "grafana/drone-downstream",
"docker_puppeteer_image": "grafana/docker-puppeteer:1.1.0",
"docs_image": "grafana/docs-base:dbd975af06",
"cypress_image": "cypress/included:9.5.1-node16.14.0-slim-chrome99-ff97",
"cloud_datasources_e2e_image": "us-docker.pkg.dev/grafanalabs-dev/cloud-data-sources/e2e:latest",
"git": "alpine/git:2.40.1",
"go": "golang:{}-alpine".format(golang_version),
"node": "node:{}-alpine".format(nodejs_version),
"cloudsdk": "google/cloud-sdk:431.0.0",
"publish": "grafana/grafana-ci-deploy:1.3.3",
"alpine": "alpine:3.18.3",
"ubuntu": "ubuntu:22.04",
"curl": "byrnedo/alpine-curl:0.1.8",
"plugins_slack": "plugins/slack",
"python": "python:3.8",
"postgres_alpine": "postgres:12.3-alpine",
"mimir": "grafana/mimir:latest",
"mysql5": "mysql:5.7.39",
"mysql8": "mysql:8.0.32",
"redis_alpine": "redis:6.2.11-alpine",
"memcached_alpine": "memcached:1.6.9-alpine",
"package_publish": "us.gcr.io/kubernetes-dev/package-publish:latest",
"openldap": "osixia/openldap:1.4.0",
"drone_downstream": "grafana/drone-downstream",
"docker_puppeteer": "grafana/docker-puppeteer:1.1.0",
"docs": "grafana/docs-base:dbd975af06",
"cypress": "cypress/included:9.5.1-node16.14.0-slim-chrome99-ff97",
"dockerize": "jwilder/dockerize:0.6.1",
"shellcheck": "koalaman/shellcheck:stable",
}

@ -9,7 +9,6 @@ load(
load("scripts/drone/vault.star", "pull_secret")
failure_template = "Build {{build.number}} failed for commit: <https://github.com/{{repo.owner}}/{{repo.name}}/commit/{{build.commit}}|{{ truncate build.commit 8 }}>: {{build.link}}\nBranch: <https://github.com/{{ repo.owner }}/{{ repo.name }}/commits/{{ build.branch }}|{{ build.branch }}>\nAuthor: {{build.author}}"
drone_change_template = "`.drone.yml` and `starlark` files have been changed on the OSS repo, by: {{build.author}}. \nBranch: <https://github.com/{{ repo.owner }}/{{ repo.name }}/commits/{{ build.branch }}|{{ build.branch }}>\nCommit hash: <https://github.com/{{repo.owner}}/{{repo.name}}/commit/{{build.commit}}|{{ truncate build.commit 8 }}>"
def pipeline(
name,
@ -31,7 +30,7 @@ def pipeline(
name: controls the pipeline name.
trigger: a Drone trigger for the pipeline.
steps: the Drone steps for the pipeline.
services: auxilliary services used during the pipeline.
services: auxiliary services used during the pipeline.
Defaults to [].
platform: abstracts platform specific configuration primarily for different Drone behavior on Windows.
Defaults to 'linux'.
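
A minimal sketch of calling pipeline() with only the parameters documented above. The load path, trigger, and step contents are illustrative assumptions, not taken from this diff.

load("scripts/drone/utils/utils.star", "pipeline")  # load path is an assumption

def example_pipeline():
    # Hypothetical call using only the documented parameters;
    # real call sites pass project-specific triggers and steps.
    return pipeline(
        name = "example-pipeline",
        trigger = {"event": ["pull_request"]},
        steps = [{
            "name": "noop",
            "image": "alpine:3.18.3",
            "commands": ["true"],
        }],
        services = [],
        platform = "linux",
    )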

@@ -4,9 +4,14 @@ All the windows images needed to be in a different file than the other images, s
by trivy. Related issue: https://github.com/aquasecurity/trivy/issues/1392
"""
load(
"scripts/drone/variables.star",
"golang_version",
)
windows_images = {
"1809_image": "mcr.microsoft.com/windows:1809",
"wix_image": "grafana/ci-wix:0.1.1",
"windows_server_core_image": "docker:windowsservercore-1809",
"windows_go_image": "grafana/grafana-ci-windows-test:0.1.0",
"1809": "mcr.microsoft.com/windows:1809",
"wix": "grafana/ci-wix:0.1.1",
"windows_server_core": "docker:windowsservercore-1809",
"go": "golang:{}-windowsservercore-1809".format(golang_version),
}

@@ -0,0 +1,9 @@
"""
global variables
"""
grabpl_version = "v3.0.42"
golang_version = "1.20.10"
# nodejs_version should match what's in ".nvmrc", but without the v prefix.
nodejs_version = "18.12.0"
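
As a quick illustration of how these pins flow into image tags, mirroring the .format() calls in the images module shown earlier in this diff:

load("scripts/drone/variables.star", "golang_version", "nodejs_version")

# With the values above these resolve to "golang:1.20.10-alpine" and
# "node:18.12.0-alpine"; bumping a variable updates every image tag
# built from it.
go_image = "golang:{}-alpine".format(golang_version)
node_image = "node:{}-alpine".format(nodejs_version)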

@@ -14,6 +14,9 @@ azure_tenant = "azure_tenant"
rgm_gcp_key_base64 = "gcp_key_base64"
rgm_destination = "destination"
rgm_storybook_destination = "rgm_storybook_destination"
rgm_cdn_destination = "rgm_cdn_destination"
rgm_downloads_destination = "rgm_downloads_destination"
rgm_github_token = "github_token"
rgm_dagger_token = "dagger_token"
@@ -122,6 +125,21 @@ def secrets():
"infra/data/ci/grafana-release-eng/rgm",
"destination_prod",
),
vault_secret(
rgm_storybook_destination,
"infra/data/ci/grafana-release-eng/rgm",
"storybook_destination",
),
vault_secret(
rgm_cdn_destination,
"infra/data/ci/grafana-release-eng/rgm",
"cdn_destination",
),
vault_secret(
rgm_downloads_destination,
"infra/data/ci/grafana-release-eng/rgm",
"downloads_destination",
),
vault_secret(
rgm_dagger_token,
"infra/data/ci/grafana-release-eng/rgm",
@@ -143,21 +161,6 @@ def secrets():
"infra/data/ci/grafana-release-eng/grafana-delivery-bot",
"app-private-key",
),
vault_secret(
rgm_gcp_key_base64,
"infra/data/ci/grafana-release-eng/rgm",
"gcp_service_account_base64",
),
vault_secret(
rgm_destination,
"infra/data/ci/grafana-release-eng/rgm",
"destination",
),
vault_secret(
rgm_github_token,
"infra/data/ci/github/grafanabot",
"pat",
),
vault_secret(
"gcr_credentials",
"secret/data/common/gcr",

@@ -1,16 +0,0 @@
"""
This module returns the pipeline used for version branches.
"""
load(
"scripts/drone/events/release.star",
"oss_pipelines",
)
ver_mode = "release-branch"
trigger = {"ref": ["refs/heads/v[0-9]*"]}
def version_branch_pipelines():
return (
oss_pipelines(ver_mode = ver_mode, trigger = trigger)
)

@@ -0,0 +1,80 @@
#!/bin/bash
# This script is used to validate the npm packages that are published to npmjs.org are in the correct format.
# It won't catch things like malformed JS or Types but it will assert that the package has
# the correct files and package.json properties.
ARTIFACTS_DIR="./npm-artifacts"
for file in "$ARTIFACTS_DIR"/*.tgz; do
echo "🔍 Checking NPM package: $file"
# get filename then strip everything after package name.
dir_name=$(basename "$file" .tgz | sed -E 's/@([a-zA-Z0-9-]+)-[0-9]+\.[0-9]+\.[0-9]+(-[a-zA-Z0-9-]+)?/\1/')
mkdir -p "./npm-artifacts/$dir_name"
tar -xzf "$file" -C "./npm-artifacts/$dir_name" --strip-components=1
# Make sure the tar wasn't empty
if [ ! -d "./npm-artifacts/$dir_name" ]; then
echo -e "❌ Failed: Empty package $dir_name.\n"
exit 1
fi
# Navigate inside the new extracted directory
pushd "./npm-artifacts/$dir_name" || exit
# Check for required files
check_files=("package.json" "README.md" "CHANGELOG.md" "LICENSE_APACHE2")
for check_file in "${check_files[@]}"; do
if [ ! -f "$check_file" ]; then
echo -e "❌ Failed: Missing required file $check_file in package $dir_name.\n"
exit 1
fi
done
# @grafana/toolkit structure is different to the other packages
if [[ "$dir_name" == "grafana-toolkit" ]]; then
if [ ! -d bin ] || [ ! -f bin/grafana-toolkit.js ]; then
echo -e "❌ Failed: Missing 'bin' directory or required files in package $dir_name.\n"
exit 1
fi
echo -e "✅ Passed: package checks for $file.\n"
popd || exit
continue
fi
# Assert commonjs builds
if [ ! -d dist ] || [ ! -f dist/index.js ] || [ ! -f dist/index.d.ts ]; then
echo -e "❌ Failed: Missing 'dist' directory or required commonjs files in package $dir_name.\n"
exit 1
fi
if [ "$(jq -r '.main' package.json)" != "dist/index.js" ] || \
[ "$(jq -r '.types' package.json)" != "dist/index.d.ts" ]; then
echo -e "❌ Failed: Incorrect package.json properties in package $dir_name.\n"
exit 1
fi
# Assert esm builds
esm_packages=("grafana-data" "grafana-ui" "grafana-runtime" "grafana-e2e-selectors" "grafana-schema")
for esm_package in "${esm_packages[@]}"; do
if [[ "$dir_name" == "$esm_package" ]]; then
if [ ! -d dist/esm ] || [ ! -f dist/esm/index.js ]; then
echo -e "❌ Failed: Missing 'dist/esm' directory or required esm files in package $dir_name.\n"
exit 1
fi
if [ "$(jq -r '.module' package.json)" != "dist/esm/index.js" ]; then
echo -e "❌ Failed: Incorrect package.json properties in package $dir_name.\n"
exit 1
fi
fi
done
echo -e "✅ Passed: package checks for $file.\n"
popd || exit
done
echo "🚀 All NPM package checks passed! 🚀"
rm -rf "${ARTIFACTS_DIR:?}/"*/
exit 0
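
The sed expression above determines the extraction directory for each package. An equivalent check in Python, using a hypothetical artifact basename (an assumption about the tarball naming, not taken from this diff), shows how a versioned name reduces to the bare package name:

import re

# Hypothetical artifact basename, i.e. the result of: basename "$file" .tgz
package_basename = "@grafana-data-10.1.5"

# Same extended regex as the sed expression above; versions with or
# without a pre-release suffix both reduce to the bare package name.
dir_name = re.sub(
    r"@([a-zA-Z0-9-]+)-[0-9]+\.[0-9]+\.[0-9]+(-[a-zA-Z0-9-]+)?",
    r"\1",
    package_basename,
)

print(dir_name)  # grafana-data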