{
const styles = getStyles(theme);
return (
-
+ // When the context is open, the position must not be relative.
+ // Set the position as an inline style to override the more specific style definition from `style.logsRowMessage`.
+
diff --git a/packages/grafana-ui/src/components/Logs/LogRowMessageDetectedFields.tsx b/packages/grafana-ui/src/components/Logs/LogRowMessageDetectedFields.tsx
index 15415f0a406..51e00ebe3b6 100644
--- a/packages/grafana-ui/src/components/Logs/LogRowMessageDetectedFields.tsx
+++ b/packages/grafana-ui/src/components/Logs/LogRowMessageDetectedFields.tsx
@@ -1,4 +1,4 @@
-import { cx, css } from '@emotion/css';
+import { css } from '@emotion/css';
import React, { PureComponent } from 'react';
import { LogRowModel, Field, LinkModel } from '@grafana/data';
@@ -19,12 +19,11 @@ class UnThemedLogRowMessageDetectedFields extends PureComponent
{
render() {
const { row, showDetectedFields, getFieldLinks, wrapLogMessage } = this.props;
const fields = getAllFields(row, getFieldLinks);
- const wrapClassName = cx(
- wrapLogMessage &&
- css`
- white-space: pre-wrap;
- `
- );
+ const wrapClassName = wrapLogMessage
+ ? ''
+ : css`
+ white-space: nowrap;
+ `;
const line = showDetectedFields
.map((parsedKey) => {
diff --git a/packages/grafana-ui/src/components/Tags/Tag.tsx b/packages/grafana-ui/src/components/Tags/Tag.tsx
index 362fc04a26d..eee65fa0cd8 100644
--- a/packages/grafana-ui/src/components/Tags/Tag.tsx
+++ b/packages/grafana-ui/src/components/Tags/Tag.tsx
@@ -64,15 +64,13 @@ const getTagStyles = (theme: GrafanaTheme, name: string, colorIndex?: number) =>
font-weight: ${theme.typography.weight.semibold};
font-size: ${theme.typography.size.sm};
line-height: ${theme.typography.lineHeight.xs};
+ vertical-align: baseline;
background-color: ${colors.color};
color: ${theme.palette.gray98};
white-space: nowrap;
text-shadow: none;
padding: 3px 6px;
border-radius: ${theme.border.radius.md};
- display: flex;
- align-items: center;
- gap: 3px;
`,
hover: css`
&:hover {
diff --git a/packages/grafana-ui/src/components/Tags/TagList.story.tsx b/packages/grafana-ui/src/components/Tags/TagList.story.tsx
index f4281880aef..c40dd9cbaaf 100644
--- a/packages/grafana-ui/src/components/Tags/TagList.story.tsx
+++ b/packages/grafana-ui/src/components/Tags/TagList.story.tsx
@@ -1,9 +1,10 @@
import { action } from '@storybook/addon-actions';
+import { Story } from '@storybook/react';
import React from 'react';
import { withCenteredStory } from '../../utils/storybook/withCenteredStory';
-import { TagList } from './TagList';
+import { TagList, Props as TagListProps } from './TagList';
import mdx from './TagList.mdx';
export default {
@@ -14,15 +15,31 @@ export default {
docs: {
page: mdx,
},
+ controls: {
+ exclude: ['className', 'onClick', 'getAriaLabel'],
+ },
+ },
+ args: {
+ displayMax: 3,
+ tags: ['datasource-test', 'gdev', 'mysql', 'mssql'],
+ onClick: action('Tag clicked'),
+ showIcon: false,
},
};
-const tags = ['datasource-test', 'gdev', 'mysql', 'mssql'];
+interface StoryProps extends TagListProps {
+ showIcon?: boolean;
+}
-export const list = () => {
+export const List: Story<StoryProps> = (args) => {
return (
-
+
);
};
diff --git a/packages/grafana-ui/src/components/Tags/TagList.tsx b/packages/grafana-ui/src/components/Tags/TagList.tsx
index d22be614c5a..20ea4d66fe1 100644
--- a/packages/grafana-ui/src/components/Tags/TagList.tsx
+++ b/packages/grafana-ui/src/components/Tags/TagList.tsx
@@ -9,8 +9,11 @@ import { IconName } from '../../types/icon';
import { OnTagClick, Tag } from './Tag';
export interface Props {
+ /** Maximum number of tags to display */
displayMax?: number;
+ /** Names of the tags to display */
tags: string[];
+ /** Callback when the tag is clicked */
onClick?: OnTagClick;
/** Custom styles for the wrapper component */
className?: string;
@@ -33,8 +36,8 @@ export const TagList = memo(
))}
- {displayMax && displayMax > 0 && numTags - 1 > 0 && (
- + {numTags - 1}
+ {displayMax && displayMax > 0 && numTags - displayMax > 0 && (
+ + {numTags - displayMax}
)}
);
diff --git a/packages/grafana-ui/src/components/index.ts b/packages/grafana-ui/src/components/index.ts
index e9dd8b85ada..7d5605ebc2c 100644
--- a/packages/grafana-ui/src/components/index.ts
+++ b/packages/grafana-ui/src/components/index.ts
@@ -212,6 +212,8 @@ export { Input, getInputStyles } from './Input/Input';
export { AutoSizeInput } from './Input/AutoSizeInput';
export { FilterInput } from './FilterInput/FilterInput';
export { FormInputSize } from './Forms/types';
+export * from './SecretInput';
+export * from './SecretTextArea';
export { Switch, InlineSwitch } from './Switch/Switch';
export { Checkbox } from './Forms/Checkbox';
diff --git a/packages/grafana-ui/src/themes/GlobalStyles/page.ts b/packages/grafana-ui/src/themes/GlobalStyles/page.ts
index b0b961da430..481d690b233 100644
--- a/packages/grafana-ui/src/themes/GlobalStyles/page.ts
+++ b/packages/grafana-ui/src/themes/GlobalStyles/page.ts
@@ -26,13 +26,6 @@ export function getPageStyles(theme: GrafanaTheme2) {
flex: 1 1 0;
}
- .page-scrollbar-wrapper {
- width: 100%;
- flex-grow: 1;
- width: 100%;
- min-height: 0;
- }
-
.page-scrollbar-content {
display: flex;
min-height: 100%;
diff --git a/packages/jaeger-ui-components/package.json b/packages/jaeger-ui-components/package.json
index b54ab4c63a4..8972a296add 100644
--- a/packages/jaeger-ui-components/package.json
+++ b/packages/jaeger-ui-components/package.json
@@ -33,6 +33,7 @@
"@emotion/css": "11.9.0",
"@grafana/data": "9.1.0-pre",
"@grafana/e2e-selectors": "9.1.0-pre",
+ "@grafana/runtime": "9.1.0-pre",
"@grafana/ui": "9.1.0-pre",
"chance": "^1.0.10",
"classnames": "^2.2.5",
diff --git a/packages/jaeger-ui-components/src/TraceTimelineViewer/index.test.js b/packages/jaeger-ui-components/src/TraceTimelineViewer/index.test.js
index b116b3c2e7d..17d0e9f30a4 100644
--- a/packages/jaeger-ui-components/src/TraceTimelineViewer/index.test.js
+++ b/packages/jaeger-ui-components/src/TraceTimelineViewer/index.test.js
@@ -24,6 +24,13 @@ import TimelineHeaderRow from './TimelineHeaderRow';
import TraceTimelineViewer from './index';
+jest.mock('@grafana/runtime', () => {
+ return {
+ ...jest.requireActual('@grafana/runtime'),
+ reportInteraction: jest.fn(),
+ };
+});
+
describe('', () => {
const trace = transformTraceData(traceGenerator.trace({}));
const props = {
diff --git a/packages/jaeger-ui-components/src/TraceTimelineViewer/index.tsx b/packages/jaeger-ui-components/src/TraceTimelineViewer/index.tsx
index 8b966c86155..abbd98fabff 100644
--- a/packages/jaeger-ui-components/src/TraceTimelineViewer/index.tsx
+++ b/packages/jaeger-ui-components/src/TraceTimelineViewer/index.tsx
@@ -16,6 +16,7 @@ import { css } from '@emotion/css';
import React, { RefObject } from 'react';
import { GrafanaTheme2, LinkModel, TimeZone } from '@grafana/data';
+import { reportInteraction } from '@grafana/runtime';
import { stylesFactory, withTheme2 } from '@grafana/ui';
import { Accessors } from '../ScrollManager';
@@ -76,6 +77,7 @@ type TProps = TExtractUiFindFromStateReturn & {
scrollToFirstVisibleSpan: () => void;
traceTimeline: TTraceTimeline;
trace: Trace;
+ datasourceType: string;
spanBarOptions: SpanBarOptions | undefined;
updateNextViewRangeTime: (update: ViewRangeTimeUpdate) => void;
updateViewRangeTime: TUpdateViewRangeTimeFunction;
@@ -143,18 +145,34 @@ export class UnthemedTraceTimelineViewer extends React.PureComponent {
this.props.collapseAll(this.props.trace.spans);
+ reportInteraction('grafana_traces_traceID_expand_collapse_clicked', {
+ datasourceType: this.props.datasourceType,
+ type: 'collapseAll',
+ });
};
collapseOne = () => {
this.props.collapseOne(this.props.trace.spans);
+ reportInteraction('grafana_traces_traceID_expand_collapse_clicked', {
+ datasourceType: this.props.datasourceType,
+ type: 'collapseOne',
+ });
};
expandAll = () => {
this.props.expandAll();
+ reportInteraction('grafana_traces_traceID_expand_collapse_clicked', {
+ datasourceType: this.props.datasourceType,
+ type: 'expandAll',
+ });
};
expandOne = () => {
this.props.expandOne(this.props.trace.spans);
+ reportInteraction('grafana_traces_traceID_expand_collapse_clicked', {
+ datasourceType: this.props.datasourceType,
+ type: 'expandOne',
+ });
};
render() {
diff --git a/pkg/api/alerting.go b/pkg/api/alerting.go
index 6e28c5c6558..d9d868f4e96 100644
--- a/pkg/api/alerting.go
+++ b/pkg/api/alerting.go
@@ -507,91 +507,107 @@ func (hs *HTTPServer) NotificationTest(c *models.ReqContext) response.Response {
}
// POST /api/alerts/:alertId/pause
-func (hs *HTTPServer) PauseAlert(c *models.ReqContext) response.Response {
- dto := dtos.PauseAlertCommand{}
- if err := web.Bind(c.Req, &dto); err != nil {
- return response.Error(http.StatusBadRequest, "bad request data", err)
- }
- alertID, err := strconv.ParseInt(web.Params(c.Req)[":alertId"], 10, 64)
- if err != nil {
- return response.Error(http.StatusBadRequest, "alertId is invalid", err)
- }
- result := make(map[string]interface{})
- result["alertId"] = alertID
-
- query := models.GetAlertByIdQuery{Id: alertID}
- if err := hs.SQLStore.GetAlertById(c.Req.Context(), &query); err != nil {
- return response.Error(500, "Get Alert failed", err)
+func (hs *HTTPServer) PauseAlert(legacyAlertingEnabled *bool) func(c *models.ReqContext) response.Response {
+ if legacyAlertingEnabled == nil || !*legacyAlertingEnabled {
+ return func(_ *models.ReqContext) response.Response {
+ return response.Error(http.StatusBadRequest, "legacy alerting is disabled, so this call has no effect.", nil)
+ }
}
- guardian := guardian.New(c.Req.Context(), query.Result.DashboardId, c.OrgId, c.SignedInUser)
- if canEdit, err := guardian.CanEdit(); err != nil || !canEdit {
+ return func(c *models.ReqContext) response.Response {
+ dto := dtos.PauseAlertCommand{}
+ if err := web.Bind(c.Req, &dto); err != nil {
+ return response.Error(http.StatusBadRequest, "bad request data", err)
+ }
+ alertID, err := strconv.ParseInt(web.Params(c.Req)[":alertId"], 10, 64)
if err != nil {
- return response.Error(500, "Error while checking permissions for Alert", err)
+ return response.Error(http.StatusBadRequest, "alertId is invalid", err)
}
+ result := make(map[string]interface{})
+ result["alertId"] = alertID
- return response.Error(403, "Access denied to this dashboard and alert", nil)
- }
+ query := models.GetAlertByIdQuery{Id: alertID}
+ if err := hs.SQLStore.GetAlertById(c.Req.Context(), &query); err != nil {
+ return response.Error(500, "Get Alert failed", err)
+ }
- // Alert state validation
- if query.Result.State != models.AlertStatePaused && !dto.Paused {
- result["state"] = "un-paused"
- result["message"] = "Alert is already un-paused"
- return response.JSON(http.StatusOK, result)
- } else if query.Result.State == models.AlertStatePaused && dto.Paused {
- result["state"] = models.AlertStatePaused
- result["message"] = "Alert is already paused"
- return response.JSON(http.StatusOK, result)
- }
+ guardian := guardian.New(c.Req.Context(), query.Result.DashboardId, c.OrgId, c.SignedInUser)
+ if canEdit, err := guardian.CanEdit(); err != nil || !canEdit {
+ if err != nil {
+ return response.Error(500, "Error while checking permissions for Alert", err)
+ }
- cmd := models.PauseAlertCommand{
- OrgId: c.OrgId,
- AlertIds: []int64{alertID},
- Paused: dto.Paused,
- }
+ return response.Error(403, "Access denied to this dashboard and alert", nil)
+ }
- if err := hs.SQLStore.PauseAlert(c.Req.Context(), &cmd); err != nil {
- return response.Error(500, "", err)
- }
+ // Alert state validation
+ if query.Result.State != models.AlertStatePaused && !dto.Paused {
+ result["state"] = "un-paused"
+ result["message"] = "Alert is already un-paused"
+ return response.JSON(http.StatusOK, result)
+ } else if query.Result.State == models.AlertStatePaused && dto.Paused {
+ result["state"] = models.AlertStatePaused
+ result["message"] = "Alert is already paused"
+ return response.JSON(http.StatusOK, result)
+ }
- resp := models.AlertStateUnknown
- pausedState := "un-paused"
- if cmd.Paused {
- resp = models.AlertStatePaused
- pausedState = "paused"
- }
+ cmd := models.PauseAlertCommand{
+ OrgId: c.OrgId,
+ AlertIds: []int64{alertID},
+ Paused: dto.Paused,
+ }
- result["state"] = resp
- result["message"] = "Alert " + pausedState
- return response.JSON(http.StatusOK, result)
+ if err := hs.SQLStore.PauseAlert(c.Req.Context(), &cmd); err != nil {
+ return response.Error(500, "", err)
+ }
+
+ resp := models.AlertStateUnknown
+ pausedState := "un-paused"
+ if cmd.Paused {
+ resp = models.AlertStatePaused
+ pausedState = "paused"
+ }
+
+ result["state"] = resp
+ result["message"] = "Alert " + pausedState
+ return response.JSON(http.StatusOK, result)
+ }
}
// POST /api/admin/pause-all-alerts
-func (hs *HTTPServer) PauseAllAlerts(c *models.ReqContext) response.Response {
- dto := dtos.PauseAllAlertsCommand{}
- if err := web.Bind(c.Req, &dto); err != nil {
- return response.Error(http.StatusBadRequest, "bad request data", err)
- }
- updateCmd := models.PauseAllAlertCommand{
- Paused: dto.Paused,
+func (hs *HTTPServer) PauseAllAlerts(legacyAlertingEnabled *bool) func(c *models.ReqContext) response.Response {
+ if legacyAlertingEnabled == nil || !*legacyAlertingEnabled {
+ return func(_ *models.ReqContext) response.Response {
+ return response.Error(http.StatusBadRequest, "legacy alerting is disabled, so this call has no effect.", nil)
+ }
}
- if err := hs.SQLStore.PauseAllAlerts(c.Req.Context(), &updateCmd); err != nil {
- return response.Error(500, "Failed to pause alerts", err)
- }
+ return func(c *models.ReqContext) response.Response {
+ dto := dtos.PauseAllAlertsCommand{}
+ if err := web.Bind(c.Req, &dto); err != nil {
+ return response.Error(http.StatusBadRequest, "bad request data", err)
+ }
+ updateCmd := models.PauseAllAlertCommand{
+ Paused: dto.Paused,
+ }
- resp := models.AlertStatePending
- pausedState := "un paused"
- if updateCmd.Paused {
- resp = models.AlertStatePaused
- pausedState = "paused"
- }
+ if err := hs.SQLStore.PauseAllAlerts(c.Req.Context(), &updateCmd); err != nil {
+ return response.Error(500, "Failed to pause alerts", err)
+ }
- result := map[string]interface{}{
- "state": resp,
- "message": "alerts " + pausedState,
- "alertsAffected": updateCmd.ResultCount,
- }
+ resp := models.AlertStatePending
+ pausedState := "un paused"
+ if updateCmd.Paused {
+ resp = models.AlertStatePaused
+ pausedState = "paused"
+ }
- return response.JSON(http.StatusOK, result)
+ result := map[string]interface{}{
+ "state": resp,
+ "message": "alerts " + pausedState,
+ "alertsAffected": updateCmd.ResultCount,
+ }
+
+ return response.JSON(http.StatusOK, result)
+ }
}
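
The refactor above turns `PauseAlert` and `PauseAllAlerts` into handler factories: the legacy-alerting flag is evaluated once at route-registration time, and a stub handler is returned when the feature is off. A minimal, self-contained sketch of the pattern, using plain `net/http` and hypothetical names rather than Grafana's routing wrappers:

```go
package main

import "net/http"

// pauseHandler returns a stub when legacy alerting is disabled, so the
// flag is checked once at registration time instead of on every request.
// Names here are illustrative, not Grafana's actual API.
func pauseHandler(legacyAlertingEnabled *bool) http.HandlerFunc {
	if legacyAlertingEnabled == nil || !*legacyAlertingEnabled {
		return func(w http.ResponseWriter, _ *http.Request) {
			http.Error(w, "legacy alerting is disabled, so this call has no effect.", http.StatusBadRequest)
		}
	}
	return func(w http.ResponseWriter, r *http.Request) {
		// ... the real pause logic would run here.
		w.WriteHeader(http.StatusOK)
	}
}

func main() {
	enabled := true
	http.Handle("/api/alerts/pause", pauseHandler(&enabled))
}
```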
diff --git a/pkg/api/alerting_test.go b/pkg/api/alerting_test.go
index f5f2ee9961b..70df8539ca6 100644
--- a/pkg/api/alerting_test.go
+++ b/pkg/api/alerting_test.go
@@ -145,7 +145,9 @@ func postAlertScenario(t *testing.T, hs *HTTPServer, desc string, url string, ro
sc.context.OrgId = testOrgID
sc.context.OrgRole = role
- return hs.PauseAlert(c)
+ legacyAlertingEnabled := new(bool)
+ *legacyAlertingEnabled = true
+ return hs.PauseAlert(legacyAlertingEnabled)(c)
})
sc.m.Post(routePattern, sc.defaultHandler)
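
As an aside, the two-line bool-pointer setup above could also use the `pointer` helper this PR imports elsewhere (github.com/xorcare/pointer, seen below in dashboard_service_test.go); a hedged one-liner equivalent, assuming that helper's `Bool` constructor:

```go
legacyAlertingEnabled := pointer.Bool(true) // same as new(bool) + assignment
```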
diff --git a/pkg/api/api.go b/pkg/api/api.go
index 167a94fda63..5fbb25a1b04 100644
--- a/pkg/api/api.go
+++ b/pkg/api/api.go
@@ -16,6 +16,7 @@ import (
"github.com/grafana/grafana/pkg/services/featuremgmt"
publicdashboardsapi "github.com/grafana/grafana/pkg/services/publicdashboards/api"
"github.com/grafana/grafana/pkg/services/serviceaccounts"
+ "github.com/grafana/grafana/pkg/setting"
"github.com/grafana/grafana/pkg/web"
)
@@ -460,7 +461,7 @@ func (hs *HTTPServer) registerRoutes() {
apiRoute.Group("/alerts", func(alertsRoute routing.RouteRegister) {
alertsRoute.Post("/test", routing.Wrap(hs.AlertTest))
- alertsRoute.Post("/:alertId/pause", reqEditorRole, routing.Wrap(hs.PauseAlert))
+ alertsRoute.Post("/:alertId/pause", reqEditorRole, routing.Wrap(hs.PauseAlert(setting.AlertingEnabled)))
alertsRoute.Get("/:alertId", hs.ValidateOrgAlert, routing.Wrap(hs.GetAlert))
alertsRoute.Get("/", routing.Wrap(hs.GetAlerts))
alertsRoute.Get("/states-for-dashboard", routing.Wrap(hs.GetAlertStatesForDashboard))
@@ -554,7 +555,7 @@ func (hs *HTTPServer) registerRoutes() {
adminRoute.Get("/settings/features", authorize(reqGrafanaAdmin, ac.EvalPermission(ac.ActionSettingsRead)), hs.Features.HandleGetSettings)
}
adminRoute.Get("/stats", authorize(reqGrafanaAdmin, ac.EvalPermission(ac.ActionServerStatsRead)), routing.Wrap(hs.AdminGetStats))
- adminRoute.Post("/pause-all-alerts", reqGrafanaAdmin, routing.Wrap(hs.PauseAllAlerts))
+ adminRoute.Post("/pause-all-alerts", reqGrafanaAdmin, routing.Wrap(hs.PauseAllAlerts(setting.AlertingEnabled)))
if hs.ThumbService != nil && hs.Features.IsEnabled(featuremgmt.FlagDashboardPreviewsAdmin) {
adminRoute.Post("/crawler/start", reqGrafanaAdmin, routing.Wrap(hs.ThumbService.StartCrawler))
@@ -566,6 +567,7 @@ func (hs *HTTPServer) registerRoutes() {
adminRoute.Get("/export", reqGrafanaAdmin, routing.Wrap(hs.ExportService.HandleGetStatus))
adminRoute.Post("/export", reqGrafanaAdmin, routing.Wrap(hs.ExportService.HandleRequestExport))
adminRoute.Post("/export/stop", reqGrafanaAdmin, routing.Wrap(hs.ExportService.HandleRequestStop))
+ adminRoute.Get("/export/options", reqGrafanaAdmin, routing.Wrap(hs.ExportService.HandleGetOptions))
}
adminRoute.Post("/encryption/rotate-data-keys", reqGrafanaAdmin, routing.Wrap(hs.AdminRotateDataEncryptionKeys))
diff --git a/pkg/api/datasources.go b/pkg/api/datasources.go
index d2491fb863f..ef075b4b200 100644
--- a/pkg/api/datasources.go
+++ b/pkg/api/datasources.go
@@ -626,13 +626,9 @@ func (hs *HTTPServer) checkDatasourceHealth(c *models.ReqContext, ds *datasource
return response.JSON(http.StatusOK, payload)
}
-func (hs *HTTPServer) decryptSecureJsonDataFn(ctx context.Context) func(ds *datasources.DataSource) map[string]string {
- return func(ds *datasources.DataSource) map[string]string {
- decryptedJsonData, err := hs.DataSourcesService.DecryptedValues(ctx, ds)
- if err != nil {
- hs.log.Error("Failed to decrypt secure json data", "error", err)
- }
- return decryptedJsonData
+func (hs *HTTPServer) decryptSecureJsonDataFn(ctx context.Context) func(ds *datasources.DataSource) (map[string]string, error) {
+ return func(ds *datasources.DataSource) (map[string]string, error) {
+ return hs.DataSourcesService.DecryptedValues(ctx, ds)
}
}
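
With the new signature, decryption failures surface to the caller instead of being logged and swallowed inside the closure. A hedged sketch of what a caller now looks like; `DataSource` and the function names below are simplified stand-ins, not Grafana's real types:

```go
package sketch

// Sketch only: simplified stand-in for Grafana's datasource model.
type DataSource struct{ UID string }

type decryptFn func(ds *DataSource) (map[string]string, error)

// useSecureJSONData shows the caller's new obligation: decryption
// errors must be handled or propagated, not silently logged.
func useSecureJSONData(ds *DataSource, decrypt decryptFn) error {
	secrets, err := decrypt(ds)
	if err != nil {
		return err
	}
	_ = secrets["password"] // use a decrypted value
	return nil
}
```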
diff --git a/pkg/api/docs/definitions/playlists.go b/pkg/api/docs/definitions/playlists.go
new file mode 100644
index 00000000000..321a137601b
--- /dev/null
+++ b/pkg/api/docs/definitions/playlists.go
@@ -0,0 +1,177 @@
+package definitions
+
+import (
+ "github.com/grafana/grafana/pkg/api/dtos"
+ "github.com/grafana/grafana/pkg/models"
+)
+
+// swagger:route GET /playlists playlists searchPlaylists
+//
+// Get playlists.
+//
+// Responses:
+// 200: searchPlaylistsResponse
+// 500: internalServerError
+
+// swagger:route GET /playlists/{uid} playlists getPlaylist
+//
+// Get playlist by UID.
+//
+// Responses:
+// 200: getPlaylistResponse
+// 401: unauthorisedError
+// 403: forbiddenError
+// 404: notFoundError
+// 500: internalServerError
+
+// swagger:route GET /playlists/{uid}/items playlists getPlaylistItems
+//
+// Get playlist items.
+//
+// Responses:
+// 200: getPlaylistItemsResponse
+// 401: unauthorisedError
+// 403: forbiddenError
+// 404: notFoundError
+// 500: internalServerError
+
+// swagger:route GET /playlists/{uid}/dashboards playlists getPlaylistDashboards
+//
+// Get playlist dashboards.
+//
+// Responses:
+// 200: getPlaylistDashboardsResponse
+// 401: unauthorisedError
+// 403: forbiddenError
+// 404: notFoundError
+// 500: internalServerError
+
+// swagger:route DELETE /playlists/{uid} playlists deletePlaylist
+//
+// Delete playlist.
+//
+// Responses:
+// 200: okResponse
+// 401: unauthorisedError
+// 403: forbiddenError
+// 404: notFoundError
+// 500: internalServerError
+
+// swagger:route PUT /playlists/{uid} playlists updatePlaylist
+//
+// Update playlist.
+//
+// Responses:
+// 200: updatePlaylistResponse
+// 401: unauthorisedError
+// 403: forbiddenError
+// 404: notFoundError
+// 500: internalServerError
+
+// swagger:route POST /playlists playlists createPlaylist
+//
+// Create playlist.
+//
+// Responses:
+// 200: createPlaylistResponse
+// 401: unauthorisedError
+// 403: forbiddenError
+// 404: notFoundError
+// 500: internalServerError
+
+// swagger:parameters searchPlaylists
+type SearchPlaylistsParams struct {
+ // in:query
+ // required:false
+ Query string `json:"query"`
+ // in:query
+ // required:false
+ Limit int `json:"limit"`
+}
+
+// swagger:parameters getPlaylist
+type GetPlaylistParams struct {
+ // in:path
+ // required:true
+ UID string `json:"uid"`
+}
+
+// swagger:parameters getPlaylistItems
+type GetPlaylistItemsParams struct {
+ // in:path
+ // required:true
+ UID string `json:"uid"`
+}
+
+// swagger:parameters getPlaylistDashboards
+type GetPlaylistDashboardsParams struct {
+ // in:path
+ // required:true
+ UID string `json:"uid"`
+}
+
+// swagger:parameters deletePlaylist
+type DeletePlaylistParams struct {
+ // in:path
+ // required:true
+ UID string `json:"uid"`
+}
+
+// swagger:parameters updatePlaylist
+type UpdatePlaylistParams struct {
+ // in:body
+ // required:true
+ Body models.UpdatePlaylistCommand
+ // in:path
+ // required:true
+ UID string `json:"uid"`
+}
+
+// swagger:parameters createPlaylist
+type CreatePlaylistParams struct {
+ // in:body
+ // required:true
+ Body models.CreatePlaylistCommand
+}
+
+// swagger:response searchPlaylistsResponse
+type SearchPlaylistsResponse struct {
+ // The response message
+ // in: body
+ Body models.Playlists `json:"body"`
+}
+
+// swagger:response getPlaylistResponse
+type GetPlaylistResponse struct {
+ // The response message
+ // in: body
+ Body *models.PlaylistDTO `json:"body"`
+}
+
+// swagger:response getPlaylistItemsResponse
+type GetPlaylistItemsResponse struct {
+ // The response message
+ // in: body
+ Body []models.PlaylistItemDTO `json:"body"`
+}
+
+// swagger:response getPlaylistDashboardsResponse
+type GetPlaylistDashboardsResponse struct {
+ // The response message
+ // in: body
+ Body dtos.PlaylistDashboardsSlice `json:"body"`
+}
+
+// swagger:response updatePlaylistResponse
+type UpdatePlaylistResponseResponse struct {
+ // The response message
+ // in: body
+ Body *models.PlaylistDTO `json:"body"`
+}
+
+// swagger:response createPlaylistResponse
+type CreatePlaylistResponse struct {
+ // The response message
+ // in: body
+ Body *models.Playlist `json:"body"`
+}
diff --git a/pkg/api/http_server.go b/pkg/api/http_server.go
index ac80801de41..85699c01490 100644
--- a/pkg/api/http_server.go
+++ b/pkg/api/http_server.go
@@ -519,7 +519,7 @@ func (hs *HTTPServer) addMiddlewaresAndStaticRoutes() {
}
m.Use(middleware.Recovery(hs.Cfg))
- m.UseMiddleware(hs.Csrf.Middleware(hs.log))
+ m.UseMiddleware(hs.Csrf.Middleware())
hs.mapStatic(m, hs.Cfg.StaticRootPath, "build", "public/build")
hs.mapStatic(m, hs.Cfg.StaticRootPath, "", "public", "/public/views/swagger.html")
diff --git a/pkg/api/index.go b/pkg/api/index.go
index 9ab27c752c5..de644dabb94 100644
--- a/pkg/api/index.go
+++ b/pkg/api/index.go
@@ -36,17 +36,17 @@ func (hs *HTTPServer) getProfileNode(c *models.ReqContext) *dtos.NavLink {
children := []*dtos.NavLink{
{
- Text: "Preferences", Id: "profile-settings", Url: hs.Cfg.AppSubURL + "/profile", Icon: "sliders-v-alt",
+ Text: "Preferences", Id: "profile/settings", Url: hs.Cfg.AppSubURL + "/profile", Icon: "sliders-v-alt",
},
}
children = append(children, &dtos.NavLink{
- Text: "Notification history", Id: "notifications", Url: hs.Cfg.AppSubURL + "/notifications", Icon: "bell",
+ Text: "Notification history", Id: "profile/notifications", Url: hs.Cfg.AppSubURL + "profile/notifications", Icon: "bell",
})
if setting.AddChangePasswordLink() {
children = append(children, &dtos.NavLink{
- Text: "Change password", Id: "change-password", Url: hs.Cfg.AppSubURL + "/profile/password",
+ Text: "Change password", Id: "profile/password", Url: hs.Cfg.AppSubURL + "/profile/password",
Icon: "lock",
})
}
diff --git a/pkg/api/metrics.go b/pkg/api/metrics.go
index 69bc2b8eb78..7e611bc85c2 100644
--- a/pkg/api/metrics.go
+++ b/pkg/api/metrics.go
@@ -2,6 +2,7 @@ package api
import (
"errors"
+ "fmt"
"net/http"
"github.com/grafana/grafana-plugin-sdk-go/backend"
@@ -24,7 +25,13 @@ func (hs *HTTPServer) handleQueryMetricsError(err error) *response.NormalRespons
if errors.Is(err, datasources.ErrDataSourceNotFound) {
return response.Error(http.StatusNotFound, "Data source not found", err)
}
- var badQuery *query.ErrBadQuery
+
+ var secretsPlugin datasources.ErrDatasourceSecretsPluginUserFriendly
+ if errors.As(err, &secretsPlugin) {
+ return response.Error(http.StatusInternalServerError, fmt.Sprint("Secrets Plugin error: ", err.Error()), err)
+ }
+
+ var badQuery query.ErrBadQuery
if errors.As(err, &badQuery) {
return response.Error(http.StatusBadRequest, util.Capitalize(badQuery.Message), err)
}
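
Note that `badQuery` changes from the pointer type `*query.ErrBadQuery` to the value type `query.ErrBadQuery`; `errors.As` works with either, as long as the target matches how the error type implements `error`. A small self-contained sketch of matching a value-type error (the `ErrBadQuery` below is a hypothetical stand-in, not Grafana's type):

```go
package main

import (
	"errors"
	"fmt"
)

// ErrBadQuery is a stand-in for a value-type error: Error is defined on
// the value receiver, so errors.As matches against ErrBadQuery itself,
// not *ErrBadQuery.
type ErrBadQuery struct{ Message string }

func (e ErrBadQuery) Error() string { return "bad query: " + e.Message }

func main() {
	err := fmt.Errorf("handling request: %w", ErrBadQuery{Message: "missing refId"})

	var badQuery ErrBadQuery
	if errors.As(err, &badQuery) {
		fmt.Println(badQuery.Message) // missing refId
	}
}
```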
diff --git a/pkg/api/metrics_test.go b/pkg/api/metrics_test.go
index fedffcc8414..60783307019 100644
--- a/pkg/api/metrics_test.go
+++ b/pkg/api/metrics_test.go
@@ -1,7 +1,9 @@
package api
import (
+ "bytes"
"context"
+ "encoding/json"
"fmt"
"net/http"
"strings"
@@ -40,6 +42,11 @@ type fakePluginRequestValidator struct {
err error
}
+type secretsErrorResponseBody struct {
+ Error string `json:"error"`
+ Message string `json:"message"`
+}
+
func (rv *fakePluginRequestValidator) Validate(dsURL string, req *http.Request) error {
return rv.err
}
@@ -104,3 +111,44 @@ func TestAPIEndpoint_Metrics_QueryMetricsV2(t *testing.T) {
require.Equal(t, http.StatusMultiStatus, resp.StatusCode)
})
}
+
+func TestAPIEndpoint_Metrics_PluginDecryptionFailure(t *testing.T) {
+ qds := query.ProvideService(
+ nil,
+ nil,
+ nil,
+ &fakePluginRequestValidator{},
+ &fakeDatasources.FakeDataSourceService{SimulatePluginFailure: true},
+ &fakePluginClient{
+ QueryDataHandlerFunc: func(ctx context.Context, req *backend.QueryDataRequest) (*backend.QueryDataResponse, error) {
+ resp := backend.Responses{
+ "A": backend.DataResponse{
+ Error: fmt.Errorf("query failed"),
+ },
+ }
+ return &backend.QueryDataResponse{Responses: resp}, nil
+ },
+ },
+ &fakeOAuthTokenService{},
+ )
+ httpServer := SetupAPITestServer(t, func(hs *HTTPServer) {
+ hs.queryDataService = qds
+ })
+
+ t.Run("Status code is 500 and a secrets plugin error is returned if there is a problem getting secrets from the remote plugin", func(t *testing.T) {
+ req := httpServer.NewPostRequest("/api/ds/query", strings.NewReader(queryDatasourceInput))
+ webtest.RequestWithSignedInUser(req, &models.SignedInUser{UserId: 1, OrgId: 1, OrgRole: models.ROLE_VIEWER})
+ resp, err := httpServer.SendJSON(req)
+ require.NoError(t, err)
+ require.Equal(t, http.StatusInternalServerError, resp.StatusCode)
+ buf := new(bytes.Buffer)
+ _, err = buf.ReadFrom(resp.Body)
+ require.NoError(t, err)
+ require.NoError(t, resp.Body.Close())
+ var resObj secretsErrorResponseBody
+ err = json.Unmarshal(buf.Bytes(), &resObj)
+ require.NoError(t, err)
+ require.Equal(t, "unknown error", resObj.Error)
+ require.Contains(t, resObj.Message, "Secrets Plugin error:")
+ })
+}
diff --git a/pkg/api/response/response.go b/pkg/api/response/response.go
index 604a84dd6da..744a8f0ab45 100644
--- a/pkg/api/response/response.go
+++ b/pkg/api/response/response.go
@@ -239,6 +239,22 @@ func Err(err error) *NormalResponse {
return resp
}
+// ErrOrFallback uses the information in an errutil.Error if available
+// and otherwise falls back to the status and message provided as
+// arguments.
+//
+// The signature is equivalent to that of Error, which allows us to
+// rename this to Error once we're confident that doing so is safe.
+func ErrOrFallback(status int, message string, err error) *NormalResponse {
+ grafanaErr := &errutil.Error{}
+ if errors.As(err, grafanaErr) {
+ return Err(err)
+ }
+
+ return Error(status, message, err)
+}
+
// Empty creates an empty NormalResponse.
func Empty(status int) *NormalResponse {
return Respond(status, nil)
diff --git a/pkg/api/response/response_test.go b/pkg/api/response/response_test.go
new file mode 100644
index 00000000000..67658cb497a
--- /dev/null
+++ b/pkg/api/response/response_test.go
@@ -0,0 +1,127 @@
+package response
+
+import (
+ "errors"
+ "net/http"
+ "testing"
+
+ "github.com/grafana/grafana/pkg/util/errutil"
+
+ "github.com/stretchr/testify/assert"
+ "github.com/stretchr/testify/require"
+)
+
+func TestErrors(t *testing.T) {
+ const fakeNotFoundMessage = "I looked, but did not find the thing"
+ const genericErrorMessage = "Something went wrong in parsing the request"
+
+ cases := []struct {
+ name string
+
+ // inputs
+ err error
+ statusCode int
+ message string
+
+ // responses
+ legacyResponse *NormalResponse
+ newResponse *NormalResponse
+ fallbackUseNew bool
+ compareErr bool
+ }{
+ {
+ name: "base case",
+
+ legacyResponse: &NormalResponse{},
+ newResponse: &NormalResponse{
+ status: http.StatusInternalServerError,
+ },
+ },
+ {
+ name: "not found error",
+
+ err: errors.New("not found"),
+ statusCode: http.StatusNotFound,
+ message: fakeNotFoundMessage,
+
+ legacyResponse: &NormalResponse{
+ status: http.StatusNotFound,
+ errMessage: fakeNotFoundMessage,
+ },
+ newResponse: &NormalResponse{
+ status: http.StatusInternalServerError,
+ },
+ },
+ {
+ name: "grafana error with fallback to other error",
+
+ err: errutil.NewBase(errutil.StatusTimeout, "thing.timeout").Errorf("whoops"),
+ statusCode: http.StatusBadRequest,
+ message: genericErrorMessage,
+
+ legacyResponse: &NormalResponse{
+ status: http.StatusBadRequest,
+ errMessage: genericErrorMessage,
+ },
+ newResponse: &NormalResponse{
+ status: http.StatusGatewayTimeout,
+ errMessage: errutil.StatusTimeout.String(),
+ },
+ fallbackUseNew: true,
+ },
+ }
+
+ compareResponses := func(expected *NormalResponse, actual *NormalResponse, compareErr bool) func(t *testing.T) {
+ return func(t *testing.T) {
+ if expected == nil {
+ require.Nil(t, actual)
+ return
+ }
+
+ require.NotNil(t, actual)
+ assert.Equal(t, expected.status, actual.status)
+ if expected.body != nil {
+ assert.Equal(t, expected.body.Bytes(), actual.body.Bytes())
+ }
+ if expected.header != nil {
+ assert.EqualValues(t, expected.header, actual.header)
+ }
+ assert.Equal(t, expected.errMessage, actual.errMessage)
+ if compareErr {
+ assert.ErrorIs(t, expected.err, actual.err)
+ }
+ }
+ }
+
+ for _, tc := range cases {
+ tc := tc
+ t.Run(
+ tc.name+" Error",
+ compareResponses(tc.legacyResponse, Error(
+ tc.statusCode,
+ tc.message,
+ tc.err,
+ ), tc.compareErr),
+ )
+
+ t.Run(
+ tc.name+" Err",
+ compareResponses(tc.newResponse, Err(
+ tc.err,
+ ), tc.compareErr),
+ )
+
+ fallbackResponse := tc.legacyResponse
+ if tc.fallbackUseNew {
+ fallbackResponse = tc.newResponse
+ }
+ t.Run(
+ tc.name+" ErrOrFallback",
+ compareResponses(fallbackResponse, ErrOrFallback(
+ tc.statusCode,
+ tc.message,
+ tc.err,
+ ), tc.compareErr),
+ )
+ }
+}
diff --git a/pkg/events/events.go b/pkg/events/events.go
index fd77a9a4061..529147d2661 100644
--- a/pkg/events/events.go
+++ b/pkg/events/events.go
@@ -55,6 +55,14 @@ type DataSourceDeleted struct {
OrgID int64 `json:"org_id"`
}
+type DataSourceSecretDeleted struct {
+ Timestamp time.Time `json:"timestamp"`
+ Name string `json:"name"`
+ ID int64 `json:"id"`
+ UID string `json:"uid"`
+ OrgID int64 `json:"org_id"`
+}
+
type DataSourceCreated struct {
Timestamp time.Time `json:"timestamp"`
Name string `json:"name"`
diff --git a/pkg/expr/transform.go b/pkg/expr/transform.go
index 51cd60ce055..b3ae0de2532 100644
--- a/pkg/expr/transform.go
+++ b/pkg/expr/transform.go
@@ -127,12 +127,8 @@ func hiddenRefIDs(queries []Query) (map[string]struct{}, error) {
return hidden, nil
}
-func (s *Service) decryptSecureJsonDataFn(ctx context.Context) func(ds *datasources.DataSource) map[string]string {
- return func(ds *datasources.DataSource) map[string]string {
- decryptedJsonData, err := s.dataSourceService.DecryptedValues(ctx, ds)
- if err != nil {
- logger.Error("Failed to decrypt secure json data", "error", err)
- }
- return decryptedJsonData
+func (s *Service) decryptSecureJsonDataFn(ctx context.Context) func(ds *datasources.DataSource) (map[string]string, error) {
+ return func(ds *datasources.DataSource) (map[string]string, error) {
+ return s.dataSourceService.DecryptedValues(ctx, ds)
}
}
diff --git a/pkg/middleware/csrf/csrf.go b/pkg/middleware/csrf/csrf.go
index a9694665099..85ca6069837 100644
--- a/pkg/middleware/csrf/csrf.go
+++ b/pkg/middleware/csrf/csrf.go
@@ -2,112 +2,62 @@ package csrf
import (
"errors"
+ "fmt"
"net/http"
"net/url"
+ "reflect"
- "github.com/grafana/grafana/pkg/infra/log"
"github.com/grafana/grafana/pkg/setting"
"github.com/grafana/grafana/pkg/util"
)
type Service interface {
- Middleware(logger log.Logger) func(http.Handler) http.Handler
+ Middleware() func(http.Handler) http.Handler
TrustOrigin(origin string)
- AddOriginHeader(headerName string)
+ AddAdditionalHeaders(headerName string)
AddSafeEndpoint(endpoint string)
}
-type Implementation struct {
+type CSRF struct {
cfg *setting.Cfg
trustedOrigins map[string]struct{}
- originHeaders map[string]struct{}
+ headers map[string]struct{}
safeEndpoints map[string]struct{}
}
func ProvideCSRFFilter(cfg *setting.Cfg) Service {
- i := &Implementation{
+ c := &CSRF{
cfg: cfg,
trustedOrigins: map[string]struct{}{},
- originHeaders: map[string]struct{}{
- "Origin": {},
- },
- safeEndpoints: map[string]struct{}{},
+ headers: map[string]struct{}{},
+ safeEndpoints: map[string]struct{}{},
}
additionalHeaders := cfg.SectionWithEnvOverrides("security").Key("csrf_additional_headers").Strings(" ")
trustedOrigins := cfg.SectionWithEnvOverrides("security").Key("csrf_trusted_origins").Strings(" ")
for _, header := range additionalHeaders {
- i.originHeaders[header] = struct{}{}
+ c.headers[header] = struct{}{}
}
for _, origin := range trustedOrigins {
- i.trustedOrigins[origin] = struct{}{}
+ c.trustedOrigins[origin] = struct{}{}
}
- return i
+ return c
}
-func (i *Implementation) Middleware(logger log.Logger) func(http.Handler) http.Handler {
- // As per RFC 7231/4.2.2 these methods are idempotent:
- // (GET is excluded because it may have side effects in some APIs)
- safeMethods := []string{"HEAD", "OPTIONS", "TRACE"}
-
+func (c *CSRF) Middleware() func(http.Handler) http.Handler {
return func(next http.Handler) http.Handler {
return http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
- // If request has no login cookie - skip CSRF checks
- if _, err := r.Cookie(i.cfg.LoginCookieName); errors.Is(err, http.ErrNoCookie) {
- next.ServeHTTP(w, r)
- return
- }
- // Skip CSRF checks for "safe" methods
- for _, method := range safeMethods {
- if r.Method == method {
- next.ServeHTTP(w, r)
- return
- }
- }
- // Skip CSRF checks for "safe" endpoints
- for safeEndpoint := range i.safeEndpoints {
- if r.URL.Path == safeEndpoint {
- next.ServeHTTP(w, r)
- return
- }
- }
- // Otherwise - verify that Origin matches the server origin
- netAddr, err := util.SplitHostPortDefault(r.Host, "", "0") // we ignore the port
- if err != nil {
- http.Error(w, err.Error(), http.StatusBadRequest)
- return
- }
- origins := map[string]struct{}{}
- for header := range i.originHeaders {
- origin, err := url.Parse(r.Header.Get(header))
- if err != nil {
- logger.Error("error parsing Origin header", "header", header, "err", err)
- }
- if origin.String() != "" {
- origins[origin.Hostname()] = struct{}{}
- }
- }
-
- // No Origin header sent, skip CSRF check.
- if len(origins) == 0 {
- next.ServeHTTP(w, r)
- return
- }
+ e := &errorWithStatus{}
- trustedOrigin := false
- for o := range i.trustedOrigins {
- if _, ok := origins[o]; ok {
- trustedOrigin = true
- break
+ err := c.check(r)
+ if err != nil {
+ if !errors.As(err, &e) {
+ http.Error(w, fmt.Sprintf("internal server error: expected error type errorWithStatus, got %s. Error: %v", reflect.TypeOf(err), err), http.StatusInternalServerError)
}
- }
-
- _, hostnameMatches := origins[netAddr.Host]
- if netAddr.Host == "" || !trustedOrigin && !hostnameMatches {
- http.Error(w, "origin not allowed", http.StatusForbidden)
+ http.Error(w, err.Error(), e.HTTPStatus)
return
}
@@ -116,15 +66,96 @@ func (i *Implementation) Middleware(logger log.Logger) func(http.Handler) http.H
}
}
-func (i *Implementation) TrustOrigin(origin string) {
- i.trustedOrigins[origin] = struct{}{}
+func (c *CSRF) check(r *http.Request) error {
+ // As per RFC 7231/4.2.2 these methods are idempotent:
+ // (GET is excluded because it may have side effects in some APIs)
+ safeMethods := []string{"HEAD", "OPTIONS", "TRACE"}
+
+ // If request has no login cookie - skip CSRF checks
+ if _, err := r.Cookie(c.cfg.LoginCookieName); errors.Is(err, http.ErrNoCookie) {
+ return nil
+ }
+ // Skip CSRF checks for "safe" methods
+ for _, method := range safeMethods {
+ if r.Method == method {
+ return nil
+ }
+ }
+ // Skip CSRF checks for "safe" endpoints
+ for safeEndpoint := range c.safeEndpoints {
+ if r.URL.Path == safeEndpoint {
+ return nil
+ }
+ }
+ // Otherwise - verify that Origin matches the server origin
+ netAddr, err := util.SplitHostPortDefault(r.Host, "", "0") // we ignore the port
+ if err != nil {
+ return &errorWithStatus{Underlying: err, HTTPStatus: http.StatusBadRequest}
+ }
+
+ o := r.Header.Get("Origin")
+
+ // No Origin header sent, skip CSRF check.
+ if o == "" {
+ return nil
+ }
+
+ originURL, err := url.Parse(o)
+ if err != nil {
+ return &errorWithStatus{Underlying: err, HTTPStatus: http.StatusBadRequest}
+ }
+ origin := originURL.Hostname()
+
+ trustedOrigin := false
+ for h := range c.headers {
+ customHost := r.Header.Get(h)
+ addr, err := util.SplitHostPortDefault(customHost, "", "0") // we ignore the port
+ if err != nil {
+ return &errorWithStatus{Underlying: err, HTTPStatus: http.StatusBadRequest}
+ }
+ if addr.Host == origin {
+ trustedOrigin = true
+ break
+ }
+ }
+
+ for o := range c.trustedOrigins {
+ if o == origin {
+ trustedOrigin = true
+ break
+ }
+ }
+
+ hostnameMatches := origin == netAddr.Host
+ if netAddr.Host == "" || !trustedOrigin && !hostnameMatches {
+ return &errorWithStatus{Underlying: errors.New("origin not allowed"), HTTPStatus: http.StatusForbidden}
+ }
+
+ return nil
+}
+
+func (c *CSRF) TrustOrigin(origin string) {
+ c.trustedOrigins[origin] = struct{}{}
}
-func (i *Implementation) AddOriginHeader(headerName string) {
- i.originHeaders[headerName] = struct{}{}
+func (c *CSRF) AddAdditionalHeaders(headerName string) {
+ c.headers[headerName] = struct{}{}
}
// AddSafeEndpoint registers an endpoint whose requests skip the CSRF check
-func (i *Implementation) AddSafeEndpoint(endpoint string) {
- i.safeEndpoints[endpoint] = struct{}{}
+func (c *CSRF) AddSafeEndpoint(endpoint string) {
+ c.safeEndpoints[endpoint] = struct{}{}
+}
+
+type errorWithStatus struct {
+ Underlying error
+ HTTPStatus int
+}
+
+func (e errorWithStatus) Error() string {
+ return e.Underlying.Error()
+}
+
+func (e errorWithStatus) Unwrap() error {
+ return e.Underlying
}
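
The refactor moves all CSRF decisions into `check`, which reports failures through `errorWithStatus`; the middleware then maps the error onto an HTTP response via `errors.As`. A minimal sketch of this error-with-status pattern in isolation (the `check` body here is a placeholder, not the real origin logic):

```go
package main

import (
	"errors"
	"fmt"
	"net/http"
)

// errorWithStatus pairs an underlying error with the HTTP status the
// middleware should respond with.
type errorWithStatus struct {
	Underlying error
	HTTPStatus int
}

func (e errorWithStatus) Error() string { return e.Underlying.Error() }
func (e errorWithStatus) Unwrap() error { return e.Underlying }

func check(allowed bool) error {
	if !allowed {
		return &errorWithStatus{
			Underlying: errors.New("origin not allowed"),
			HTTPStatus: http.StatusForbidden,
		}
	}
	return nil
}

func main() {
	err := check(false)
	e := &errorWithStatus{}
	if errors.As(err, &e) {
		fmt.Println(e.HTTPStatus, e.Error()) // 403 origin not allowed
	}
}
```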
diff --git a/pkg/middleware/csrf/csrf_test.go b/pkg/middleware/csrf/csrf_test.go
index f1e132ef593..470f20613ae 100644
--- a/pkg/middleware/csrf/csrf_test.go
+++ b/pkg/middleware/csrf/csrf_test.go
@@ -1,13 +1,15 @@
package csrf
import (
+ "errors"
"net/http"
"net/http/httptest"
+ "strings"
"testing"
+ "github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
- "github.com/grafana/grafana/pkg/infra/log"
"github.com/grafana/grafana/pkg/setting"
)
@@ -100,6 +102,117 @@ func TestMiddlewareCSRF(t *testing.T) {
}
}
+func TestCSRF_Check(t *testing.T) {
+ tests := []struct {
+ name string
+ request *http.Request
+ addtHeader map[string]struct{}
+ trustedOrigins map[string]struct{}
+ safeEndpoints map[string]struct{}
+ expectedOK bool
+ expectedStatus int
+ }{
+ {
+ name: "base case",
+ request: postRequest(t, "", nil),
+ expectedOK: true,
+ },
+ {
+ name: "base with null origin header",
+ request: postRequest(t, "", map[string]string{"Origin": "null"}),
+ expectedStatus: http.StatusForbidden,
+ },
+ {
+ name: "grafana.org",
+ request: postRequest(t, "grafana.org", map[string]string{"Origin": "https://grafana.org"}),
+ expectedOK: true,
+ },
+ {
+ name: "grafana.org with X-Forwarded-Host",
+ request: postRequest(t, "grafana.localhost", map[string]string{"X-Forwarded-Host": "grafana.org", "Origin": "https://grafana.org"}),
+ expectedStatus: http.StatusForbidden,
+ },
+ {
+ name: "grafana.org with X-Forwarded-Host and header trusted",
+ request: postRequest(t, "grafana.localhost", map[string]string{"X-Forwarded-Host": "grafana.org", "Origin": "https://grafana.org"}),
+ addtHeader: map[string]struct{}{"X-Forwarded-Host": {}},
+ expectedOK: true,
+ },
+ {
+ name: "grafana.org from grafana.com",
+ request: postRequest(t, "grafana.org", map[string]string{"Origin": "https://grafana.com"}),
+ expectedStatus: http.StatusForbidden,
+ },
+ {
+ name: "grafana.org from grafana.com explicit trust for grafana.com",
+ request: postRequest(t, "grafana.org", map[string]string{"Origin": "https://grafana.com"}),
+ trustedOrigins: map[string]struct{}{"grafana.com": {}},
+ expectedOK: true,
+ },
+ {
+ name: "grafana.org from grafana.com with X-Forwarded-Host and header trusted",
+ request: postRequest(t, "grafana.localhost", map[string]string{"X-Forwarded-Host": "grafana.org", "Origin": "https://grafana.com"}),
+ addtHeader: map[string]struct{}{"X-Forwarded-Host": {}},
+ trustedOrigins: map[string]struct{}{"grafana.com": {}},
+ expectedOK: true,
+ },
+ {
+ name: "safe endpoint",
+ request: postRequest(t, "example.org/foo/bar", map[string]string{"Origin": "null"}),
+ safeEndpoints: map[string]struct{}{"foo/bar": {}},
+ expectedOK: true,
+ },
+ }
+
+ for _, tc := range tests {
+ tc := tc
+
+ t.Run(tc.name, func(t *testing.T) {
+ c := CSRF{
+ cfg: setting.NewCfg(),
+ trustedOrigins: tc.trustedOrigins,
+ headers: tc.addtHeader,
+ safeEndpoints: tc.safeEndpoints,
+ }
+ c.cfg.LoginCookieName = "LoginCookie"
+
+ err := c.check(tc.request)
+ if tc.expectedOK {
+ require.NoError(t, err)
+ } else {
+ require.Error(t, err)
+ var actual *errorWithStatus
+ require.True(t, errors.As(err, &actual))
+ assert.EqualValues(t, tc.expectedStatus, actual.HTTPStatus)
+ }
+ })
+ }
+}
+
+func postRequest(t testing.TB, hostname string, headers map[string]string) *http.Request {
+ t.Helper()
+ urlParts := strings.SplitN(hostname, "/", 2)
+
+ path := "/"
+ if len(urlParts) == 2 {
+ path = urlParts[1]
+ }
+ r, err := http.NewRequest(http.MethodPost, path, nil)
+ require.NoError(t, err)
+
+ r.Host = urlParts[0]
+
+ r.AddCookie(&http.Cookie{
+ Name: "LoginCookie",
+ Value: "this should not be important",
+ })
+
+ for k, v := range headers {
+ r.Header.Set(k, v)
+ }
+ return r
+}
+
func csrfScenario(t *testing.T, cookieName, method, origin, host string) *httptest.ResponseRecorder {
req, err := http.NewRequest(method, "/", nil)
if err != nil {
@@ -123,7 +236,7 @@ func csrfScenario(t *testing.T, cookieName, method, origin, host string) *httpte
cfg := setting.NewCfg()
cfg.LoginCookieName = cookieName
service := ProvideCSRFFilter(cfg)
- handler := service.Middleware(log.New())(testHandler)
+ handler := service.Middleware()(testHandler)
handler.ServeHTTP(rr, req)
return rr
}
diff --git a/pkg/models/dashboard_queries.go b/pkg/models/dashboard_queries.go
index f0b470acb19..5f3338f00a2 100644
--- a/pkg/models/dashboard_queries.go
+++ b/pkg/models/dashboard_queries.go
@@ -10,7 +10,7 @@ func GetUniqueDashboardDatasourceUids(dashboard *simplejson.Json) []string {
for _, panelObj := range dashboard.Get("panels").MustArray() {
panel := simplejson.NewFromAny(panelObj)
- uid := panel.Get("datasource").Get("uid").MustString()
+ uid := GetDataSourceUidFromJson(panel)
// if uid is for a mixed datasource, get the datasource uids from the targets
if uid == "-- Mixed --" {
@@ -44,8 +44,11 @@ func GroupQueriesByPanelId(dashboard *simplejson.Json) map[int64][]*simplejson.J
for _, queryObj := range panel.Get("targets").MustArray() {
query := simplejson.NewFromAny(queryObj)
+ // if the query target has no datasource, inherit the panel's datasource
if _, ok := query.CheckGet("datasource"); !ok {
- query.Set("datasource", panel.Get("datasource"))
+ uid := GetDataSourceUidFromJson(panel)
+ datasource := map[string]interface{}{"type": "public-ds", "uid": uid}
+ query.Set("datasource", datasource)
}
panelQueries = append(panelQueries, query)
@@ -61,13 +64,8 @@ func GroupQueriesByDataSource(queries []*simplejson.Json) (result [][]*simplejso
byDataSource := make(map[string][]*simplejson.Json)
for _, query := range queries {
- dataSourceUid, err := query.GetPath("datasource", "uid").String()
-
- if err != nil {
- continue
- }
-
- byDataSource[dataSourceUid] = append(byDataSource[dataSourceUid], query)
+ uid := GetDataSourceUidFromJson(query)
+ byDataSource[uid] = append(byDataSource[uid], query)
}
for _, queries := range byDataSource {
@@ -76,3 +74,14 @@ func GroupQueriesByDataSource(queries []*simplejson.Json) (result [][]*simplejso
return
}
+
+func GetDataSourceUidFromJson(query *simplejson.Json) string {
+ uid := query.Get("datasource").Get("uid").MustString()
+
+ // before schema v8.3, special types could be sent as a plain datasource string (e.g. expr)
+ if uid == "" {
+ uid = query.Get("datasource").MustString()
+ }
+
+ return uid
+}
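
`GetDataSourceUidFromJson` normalizes both the current object form and the pre-8.3 string form of the `datasource` field. A quick self-contained sketch of the two shapes it handles, using `simplejson` exactly as the file above does:

```go
package main

import (
	"fmt"

	"github.com/grafana/grafana/pkg/components/simplejson"
)

func main() {
	// Current schema: datasource is an object with a uid.
	newStyle, _ := simplejson.NewJson([]byte(`{"datasource": {"type": "postgres", "uid": "abc123"}}`))
	// Pre-8.3 schema: datasource is a plain string.
	oldStyle, _ := simplejson.NewJson([]byte(`{"datasource": "abc123"}`))

	for _, q := range []*simplejson.Json{newStyle, oldStyle} {
		uid := q.Get("datasource").Get("uid").MustString()
		if uid == "" {
			uid = q.Get("datasource").MustString()
		}
		fmt.Println(uid) // abc123 in both cases
	}
}
```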
diff --git a/pkg/models/dashboard_queries_test.go b/pkg/models/dashboard_queries_test.go
index afa160515ab..553c58a6c37 100644
--- a/pkg/models/dashboard_queries_test.go
+++ b/pkg/models/dashboard_queries_test.go
@@ -20,6 +20,37 @@ const (
"schemaVersion": 35
}`
+ dashboardWithTargetsWithNoDatasources = `
+{
+ "panels": [
+ {
+ "id": 2,
+ "datasource": {
+ "type": "postgres",
+ "uid": "abc123"
+ },
+ "targets": [
+ {
+ "expr": "go_goroutines{job=\"$job\"}",
+ "interval": "",
+ "legendFormat": "",
+ "refId": "A"
+ },
+ {
+ "exemplar": true,
+ "expr": "query2",
+ "interval": "",
+ "legendFormat": "",
+ "refId": "B"
+ }
+ ],
+ "title": "Panel Title",
+ "type": "timeseries"
+ }
+ ],
+ "schemaVersion": 35
+}`
+
dashboardWithQueries = `
{
"panels": [
@@ -256,6 +287,24 @@ func TestGetUniqueDashboardDatasourceUids(t *testing.T) {
}
func TestGroupQueriesByPanelId(t *testing.T) {
+ t.Run("can extract queries from dashboard with panel datasource string that has no datasource on panel targets", func(t *testing.T) {
+ json, err := simplejson.NewJson([]byte(oldStyleDashboard))
+ require.NoError(t, err)
+ queries := GroupQueriesByPanelId(json)
+
+ panelId := int64(2)
+ queriesByDatasource := GroupQueriesByDataSource(queries[panelId])
+ require.Len(t, queriesByDatasource[0], 1)
+ })
+ t.Run("can extract queries from dashboard with panel json datasource that has no datasource on panel targets", func(t *testing.T) {
+ json, err := simplejson.NewJson([]byte(dashboardWithTargetsWithNoDatasources))
+ require.NoError(t, err)
+ queries := GroupQueriesByPanelId(json)
+
+ panelId := int64(2)
+ queriesByDatasource := GroupQueriesByDataSource(queries[panelId])
+ require.Len(t, queriesByDatasource[0], 2)
+ })
t.Run("can extract no queries from empty dashboard", func(t *testing.T) {
json, err := simplejson.NewJson([]byte(`{"panels": {}}`))
require.NoError(t, err)
@@ -321,7 +370,10 @@ func TestGroupQueriesByPanelId(t *testing.T) {
query, err := queries[2][0].MarshalJSON()
require.NoError(t, err)
require.JSONEq(t, `{
- "datasource": "_yxMP8Ynk",
+ "datasource": {
+ "uid": "_yxMP8Ynk",
+ "type": "public-ds"
+ },
"exemplar": true,
"expr": "go_goroutines{job=\"$job\"}",
"interval": "",
diff --git a/pkg/plugins/adapters/adapters.go b/pkg/plugins/adapters/adapters.go
index 9f46067dadd..77b45c9c841 100644
--- a/pkg/plugins/adapters/adapters.go
+++ b/pkg/plugins/adapters/adapters.go
@@ -3,6 +3,7 @@ package adapters
import (
"encoding/json"
+ "fmt"
"github.com/grafana/grafana-plugin-sdk-go/backend"
@@ -11,16 +12,20 @@ import (
)
// ModelToInstanceSettings converts a datasources.DataSource to a backend.DataSourceInstanceSettings.
-func ModelToInstanceSettings(ds *datasources.DataSource, decryptFn func(ds *datasources.DataSource) map[string]string,
+func ModelToInstanceSettings(ds *datasources.DataSource, decryptFn func(ds *datasources.DataSource) (map[string]string, error),
) (*backend.DataSourceInstanceSettings, error) {
var jsonDataBytes json.RawMessage
if ds.JsonData != nil {
var err error
jsonDataBytes, err = ds.JsonData.MarshalJSON()
if err != nil {
- return nil, err
+ return nil, fmt.Errorf("failed to convert data source to instance settings: %w", err)
}
}
+ decrypted, err := decryptFn(ds)
+ if err != nil {
+ return nil, err
+ }
return &backend.DataSourceInstanceSettings{
ID: ds.Id,
@@ -32,9 +37,9 @@ func ModelToInstanceSettings(ds *datasources.DataSource, decryptFn func(ds *data
BasicAuthEnabled: ds.BasicAuth,
BasicAuthUser: ds.BasicAuthUser,
JSONData: jsonDataBytes,
- DecryptedSecureJSONData: decryptFn(ds),
+ DecryptedSecureJSONData: decrypted,
Updated: ds.Updated,
- }, nil
+ }, err
}
// BackendUserFromSignedInUser converts Grafana's SignedInUser model
diff --git a/pkg/plugins/plugincontext/plugincontext.go b/pkg/plugins/plugincontext/plugincontext.go
index 6ae57c650df..9e09a477236 100644
--- a/pkg/plugins/plugincontext/plugincontext.go
+++ b/pkg/plugins/plugincontext/plugincontext.go
@@ -127,12 +127,8 @@ func (p *Provider) getCachedPluginSettings(ctx context.Context, pluginID string,
return ps, nil
}
-func (p *Provider) decryptSecureJsonDataFn(ctx context.Context) func(ds *datasources.DataSource) map[string]string {
- return func(ds *datasources.DataSource) map[string]string {
- decryptedJsonData, err := p.dataSourceService.DecryptedValues(ctx, ds)
- if err != nil {
- p.logger.Error("Failed to decrypt secure json data", "error", err)
- }
- return decryptedJsonData
+func (p *Provider) decryptSecureJsonDataFn(ctx context.Context) func(ds *datasources.DataSource) (map[string]string, error) {
+ return func(ds *datasources.DataSource) (map[string]string, error) {
+ return p.dataSourceService.DecryptedValues(ctx, ds)
}
}
diff --git a/pkg/server/server.go b/pkg/server/server.go
index 53998302551..c28cd9e1de8 100644
--- a/pkg/server/server.go
+++ b/pkg/server/server.go
@@ -24,6 +24,7 @@ import (
"github.com/grafana/grafana/pkg/login/social"
"github.com/grafana/grafana/pkg/registry"
"github.com/grafana/grafana/pkg/services/provisioning"
+ secretsMigrations "github.com/grafana/grafana/pkg/services/secrets/kvstore/migrations"
"github.com/grafana/grafana/pkg/setting"
"golang.org/x/sync/errgroup"
@@ -43,9 +44,10 @@ type Options struct {
func New(opts Options, cfg *setting.Cfg, httpServer *api.HTTPServer, roleRegistry accesscontrol.RoleRegistry,
provisioningService provisioning.ProvisioningService, backgroundServiceProvider registry.BackgroundServiceRegistry,
usageStatsProvidersRegistry registry.UsageStatsProvidersRegistry, statsCollectorService *statscollector.Service,
+ secretMigrationService secretsMigrations.SecretMigrationService,
) (*Server, error) {
statsCollectorService.RegisterProviders(usageStatsProvidersRegistry.GetServices())
- s, err := newServer(opts, cfg, httpServer, roleRegistry, provisioningService, backgroundServiceProvider)
+ s, err := newServer(opts, cfg, httpServer, roleRegistry, provisioningService, backgroundServiceProvider, secretMigrationService)
if err != nil {
return nil, err
}
@@ -59,25 +61,27 @@ func New(opts Options, cfg *setting.Cfg, httpServer *api.HTTPServer, roleRegistr
func newServer(opts Options, cfg *setting.Cfg, httpServer *api.HTTPServer, roleRegistry accesscontrol.RoleRegistry,
provisioningService provisioning.ProvisioningService, backgroundServiceProvider registry.BackgroundServiceRegistry,
+ secretMigrationService secretsMigrations.SecretMigrationService,
) (*Server, error) {
rootCtx, shutdownFn := context.WithCancel(context.Background())
childRoutines, childCtx := errgroup.WithContext(rootCtx)
s := &Server{
- context: childCtx,
- childRoutines: childRoutines,
- HTTPServer: httpServer,
- provisioningService: provisioningService,
- roleRegistry: roleRegistry,
- shutdownFn: shutdownFn,
- shutdownFinished: make(chan struct{}),
- log: log.New("server"),
- cfg: cfg,
- pidFile: opts.PidFile,
- version: opts.Version,
- commit: opts.Commit,
- buildBranch: opts.BuildBranch,
- backgroundServices: backgroundServiceProvider.GetServices(),
+ context: childCtx,
+ childRoutines: childRoutines,
+ HTTPServer: httpServer,
+ provisioningService: provisioningService,
+ roleRegistry: roleRegistry,
+ shutdownFn: shutdownFn,
+ shutdownFinished: make(chan struct{}),
+ log: log.New("server"),
+ cfg: cfg,
+ pidFile: opts.PidFile,
+ version: opts.Version,
+ commit: opts.Commit,
+ buildBranch: opts.BuildBranch,
+ backgroundServices: backgroundServiceProvider.GetServices(),
+ secretMigrationService: secretMigrationService,
}
return s, nil
@@ -101,9 +105,10 @@ type Server struct {
buildBranch string
backgroundServices []registry.BackgroundService
- HTTPServer *api.HTTPServer
- roleRegistry accesscontrol.RoleRegistry
- provisioningService provisioning.ProvisioningService
+ HTTPServer *api.HTTPServer
+ roleRegistry accesscontrol.RoleRegistry
+ provisioningService provisioning.ProvisioningService
+ secretMigrationService secretsMigrations.SecretMigrationService
}
// init initializes the server and its services.
@@ -128,6 +133,10 @@ func (s *Server) init() error {
return err
}
+ if err := s.secretMigrationService.Migrate(s.context); err != nil {
+ return err
+ }
+
return s.provisioningService.RunInitProvisioners(s.context)
}
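
Init-time ordering matters here: the secret migration runs before `RunInitProvisioners`, so provisioned datasources see migrated secrets. The interface the server depends on is small; a sketch of its inferred shape (an assumption based solely on the single call above — the real definition lives in pkg/services/secrets/kvstore/migrations):

```go
package migrations_sketch

import "context"

// SecretMigrationService is inferred from the one call in Server.init:
// a context-accepting, error-returning Migrate method.
type SecretMigrationService interface {
	Migrate(ctx context.Context) error
}
```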
diff --git a/pkg/server/server_test.go b/pkg/server/server_test.go
index f1980a62f92..d2713eded97 100644
--- a/pkg/server/server_test.go
+++ b/pkg/server/server_test.go
@@ -7,9 +7,12 @@ import (
"testing"
"time"
+ "github.com/grafana/grafana/pkg/infra/serverlock"
"github.com/grafana/grafana/pkg/registry"
"github.com/grafana/grafana/pkg/server/backgroundsvcs"
"github.com/grafana/grafana/pkg/services/accesscontrol/ossaccesscontrol"
+ "github.com/grafana/grafana/pkg/services/secrets/kvstore/migrations"
+ "github.com/grafana/grafana/pkg/services/sqlstore"
"github.com/grafana/grafana/pkg/setting"
"github.com/stretchr/testify/require"
)
@@ -47,7 +50,11 @@ func (s *testService) IsDisabled() bool {
func testServer(t *testing.T, services ...registry.BackgroundService) *Server {
t.Helper()
- s, err := newServer(Options{}, setting.NewCfg(), nil, &ossaccesscontrol.OSSAccessControlService{}, nil, backgroundsvcs.NewBackgroundServiceRegistry(services...))
+ serverLockService := serverlock.ProvideService(sqlstore.InitTestDB(t))
+ secretMigrationService := &migrations.SecretMigrationServiceImpl{
+ ServerLockService: serverLockService,
+ }
+ s, err := newServer(Options{}, setting.NewCfg(), nil, &ossaccesscontrol.OSSAccessControlService{}, nil, backgroundsvcs.NewBackgroundServiceRegistry(services...), secretMigrationService)
require.NoError(t, err)
// Required to skip configuration initialization that causes
// DI errors in this test.
diff --git a/pkg/server/wire.go b/pkg/server/wire.go
index e7ce2a0a1b8..3c35be47e90 100644
--- a/pkg/server/wire.go
+++ b/pkg/server/wire.go
@@ -91,6 +91,7 @@ import (
"github.com/grafana/grafana/pkg/services/secrets"
secretsDatabase "github.com/grafana/grafana/pkg/services/secrets/database"
secretsStore "github.com/grafana/grafana/pkg/services/secrets/kvstore"
+ secretsMigrations "github.com/grafana/grafana/pkg/services/secrets/kvstore/migrations"
secretsManager "github.com/grafana/grafana/pkg/services/secrets/manager"
secretsMigrator "github.com/grafana/grafana/pkg/services/secrets/migrator"
"github.com/grafana/grafana/pkg/services/serviceaccounts"
@@ -293,6 +294,9 @@ var wireBasicSet = wire.NewSet(
publicdashboardsApi.ProvideApi,
userimpl.ProvideService,
orgimpl.ProvideService,
+ datasourceservice.ProvideDataSourceMigrationService,
+ secretsMigrations.ProvideSecretMigrationService,
+ wire.Bind(new(secretsMigrations.SecretMigrationService), new(*secretsMigrations.SecretMigrationServiceImpl)),
)
var wireSet = wire.NewSet(
diff --git a/pkg/services/dashboards/service/dashboard_service.go b/pkg/services/dashboards/service/dashboard_service.go
index cda6ed922b0..97bb74d19a7 100644
--- a/pkg/services/dashboards/service/dashboard_service.go
+++ b/pkg/services/dashboards/service/dashboard_service.go
@@ -225,7 +225,7 @@ func (dr *DashboardServiceImpl) SaveProvisionedDashboard(ctx context.Context, dt
},
}
- cmd, err := dr.BuildSaveDashboardCommand(ctx, dto, true, false)
+ cmd, err := dr.BuildSaveDashboardCommand(ctx, dto, setting.IsLegacyAlertingEnabled(), false)
if err != nil {
return nil, err
}
@@ -243,14 +243,17 @@ func (dr *DashboardServiceImpl) SaveProvisionedDashboard(ctx context.Context, dt
OrgID: dto.OrgId,
}
- alerts, err := dr.dashAlertExtractor.GetAlerts(ctx, dashAlertInfo)
- if err != nil {
- return nil, err
- }
+ // extract/save legacy alerts only if legacy alerting is enabled
+ if setting.IsLegacyAlertingEnabled() {
+ alerts, err := dr.dashAlertExtractor.GetAlerts(ctx, dashAlertInfo)
+ if err != nil {
+ return nil, err
+ }
- err = dr.dashboardStore.SaveAlerts(ctx, dash.Id, alerts)
- if err != nil {
- return nil, err
+ err = dr.dashboardStore.SaveAlerts(ctx, dash.Id, alerts)
+ if err != nil {
+ return nil, err
+ }
}
if dto.Dashboard.Id == 0 {
@@ -284,14 +287,17 @@ func (dr *DashboardServiceImpl) SaveFolderForProvisionedDashboards(ctx context.C
OrgID: dto.OrgId,
}
- alerts, err := dr.dashAlertExtractor.GetAlerts(ctx, dashAlertInfo)
- if err != nil {
- return nil, err
- }
+ // extract/save legacy alerts only if legacy alerting is enabled
+ if setting.IsLegacyAlertingEnabled() {
+ alerts, err := dr.dashAlertExtractor.GetAlerts(ctx, dashAlertInfo)
+ if err != nil {
+ return nil, err
+ }
- err = dr.dashboardStore.SaveAlerts(ctx, dash.Id, alerts)
- if err != nil {
- return nil, err
+ err = dr.dashboardStore.SaveAlerts(ctx, dash.Id, alerts)
+ if err != nil {
+ return nil, err
+ }
}
if dto.Dashboard.Id == 0 {
@@ -312,7 +318,7 @@ func (dr *DashboardServiceImpl) SaveDashboard(ctx context.Context, dto *dashboar
dto.Dashboard.Data.Set("refresh", setting.MinRefreshInterval)
}
- cmd, err := dr.BuildSaveDashboardCommand(ctx, dto, true, !allowUiUpdate)
+ cmd, err := dr.BuildSaveDashboardCommand(ctx, dto, setting.IsLegacyAlertingEnabled(), !allowUiUpdate)
if err != nil {
return nil, err
}
@@ -328,14 +334,17 @@ func (dr *DashboardServiceImpl) SaveDashboard(ctx context.Context, dto *dashboar
OrgID: dto.OrgId,
}
- alerts, err := dr.dashAlertExtractor.GetAlerts(ctx, dashAlertInfo)
- if err != nil {
- return nil, err
- }
+ // extract/save legacy alerts only if legacy alerting is enabled
+ if setting.IsLegacyAlertingEnabled() {
+ alerts, err := dr.dashAlertExtractor.GetAlerts(ctx, dashAlertInfo)
+ if err != nil {
+ return nil, err
+ }
- err = dr.dashboardStore.SaveAlerts(ctx, dash.Id, alerts)
- if err != nil {
- return nil, err
+ err = dr.dashboardStore.SaveAlerts(ctx, dash.Id, alerts)
+ if err != nil {
+ return nil, err
+ }
}
// new dashboard created
diff --git a/pkg/services/dashboards/service/dashboard_service_test.go b/pkg/services/dashboards/service/dashboard_service_test.go
index 652d245f823..3ba8cee38a1 100644
--- a/pkg/services/dashboards/service/dashboard_service_test.go
+++ b/pkg/services/dashboards/service/dashboard_service_test.go
@@ -7,6 +7,7 @@ import (
"github.com/stretchr/testify/mock"
"github.com/stretchr/testify/require"
+ "github.com/xorcare/pointer"
"github.com/grafana/grafana/pkg/components/simplejson"
"github.com/grafana/grafana/pkg/infra/log"
@@ -23,7 +24,9 @@ func TestIntegrationDashboardService(t *testing.T) {
t.Run("Dashboard service tests", func(t *testing.T) {
fakeStore := dashboards.FakeDashboardStore{}
defer fakeStore.AssertExpectations(t)
+
service := &DashboardServiceImpl{
+ cfg: setting.NewCfg(),
log: log.New("test.logger"),
dashboardStore: &fakeStore,
dashAlertExtractor: &dummyDashAlertExtractor{},
@@ -100,7 +103,6 @@ func TestIntegrationDashboardService(t *testing.T) {
t.Run("Should not return validation error if dashboard is provisioned but UI updates allowed", func(t *testing.T) {
fakeStore.On("ValidateDashboardBeforeSave", mock.Anything, mock.Anything).Return(true, nil).Once()
fakeStore.On("SaveDashboard", mock.Anything).Return(&models.Dashboard{Data: simplejson.New()}, nil).Once()
- fakeStore.On("SaveAlerts", mock.Anything, mock.Anything, mock.Anything).Return(nil).Once()
dto.Dashboard = models.NewDashboard("Dash")
dto.Dashboard.SetId(3)
@@ -110,6 +112,20 @@ func TestIntegrationDashboardService(t *testing.T) {
})
t.Run("Should return validation error if alert data is invalid", func(t *testing.T) {
+ origAlertingEnabledSet := setting.AlertingEnabled != nil
+ origAlertingEnabledVal := false
+ if origAlertingEnabledSet {
+ origAlertingEnabledVal = *setting.AlertingEnabled
+ }
+ setting.AlertingEnabled = pointer.Bool(true)
+ t.Cleanup(func() {
+ if !origAlertingEnabledSet {
+ setting.AlertingEnabled = nil
+ } else {
+ setting.AlertingEnabled = &origAlertingEnabledVal
+ }
+ })
+
fakeStore.On("ValidateDashboardBeforeSave", mock.Anything, mock.Anything).Return(true, nil).Once()
fakeStore.On("GetProvisionedDataByDashboardID", mock.Anything).Return(nil, nil).Once()
fakeStore.On("SaveDashboard", mock.Anything).Return(&models.Dashboard{Data: simplejson.New()}, nil).Once()
@@ -118,6 +134,7 @@ func TestIntegrationDashboardService(t *testing.T) {
dto.Dashboard = models.NewDashboard("Dash")
dto.User = &models.SignedInUser{UserId: 1}
_, err := service.SaveDashboard(context.Background(), dto, false)
+ require.Error(t, err)
require.Equal(t, err.Error(), "alert validation error")
})
})
@@ -128,7 +145,6 @@ func TestIntegrationDashboardService(t *testing.T) {
t.Run("Should not return validation error if dashboard is provisioned", func(t *testing.T) {
fakeStore.On("ValidateDashboardBeforeSave", mock.Anything, mock.Anything).Return(true, nil).Once()
fakeStore.On("SaveProvisionedDashboard", mock.Anything, mock.Anything).Return(&models.Dashboard{Data: simplejson.New()}, nil).Once()
- fakeStore.On("SaveAlerts", mock.Anything, mock.Anything, mock.Anything).Return(nil).Once()
dto.Dashboard = models.NewDashboard("Dash")
dto.Dashboard.SetId(3)
@@ -140,7 +156,6 @@ func TestIntegrationDashboardService(t *testing.T) {
t.Run("Should override invalid refresh interval if dashboard is provisioned", func(t *testing.T) {
fakeStore.On("ValidateDashboardBeforeSave", mock.Anything, mock.Anything).Return(true, nil).Once()
fakeStore.On("SaveProvisionedDashboard", mock.Anything, mock.Anything).Return(&models.Dashboard{Data: simplejson.New()}, nil).Once()
- fakeStore.On("SaveAlerts", mock.Anything, mock.Anything, mock.Anything).Return(nil).Once()
oldRefreshInterval := setting.MinRefreshInterval
setting.MinRefreshInterval = "5m"
diff --git a/pkg/services/datasources/datasources.go b/pkg/services/datasources/datasources.go
index 239aedc42d5..1fb4695b2d3 100644
--- a/pkg/services/datasources/datasources.go
+++ b/pkg/services/datasources/datasources.go
@@ -18,6 +18,9 @@ type DataSourceService interface {
// GetDataSources gets datasources.
GetDataSources(ctx context.Context, query *GetDataSourcesQuery) error
+ // GetAllDataSources gets all datasources.
+ GetAllDataSources(ctx context.Context, query *GetAllDataSourcesQuery) error
+
// GetDataSourcesByType gets datasources by type.
GetDataSourcesByType(ctx context.Context, query *GetDataSourcesByTypeQuery) error
diff --git a/pkg/services/datasources/fakes/fake_datasource_service.go b/pkg/services/datasources/fakes/fake_datasource_service.go
index 7911e1539b5..4263e313fba 100644
--- a/pkg/services/datasources/fakes/fake_datasource_service.go
+++ b/pkg/services/datasources/fakes/fake_datasource_service.go
@@ -11,8 +11,9 @@ import (
)
type FakeDataSourceService struct {
- lastId int64
- DataSources []*datasources.DataSource
+ lastId int64
+ DataSources []*datasources.DataSource
+ SimulatePluginFailure bool
}
var _ datasources.DataSourceService = &FakeDataSourceService{}
@@ -41,6 +42,11 @@ func (s *FakeDataSourceService) GetDataSources(ctx context.Context, query *datas
return nil
}
+func (s *FakeDataSourceService) GetAllDataSources(ctx context.Context, query *datasources.GetAllDataSourcesQuery) error {
+ query.Result = s.DataSources
+ return nil
+}
+
func (s *FakeDataSourceService) GetDataSourcesByType(ctx context.Context, query *datasources.GetDataSourcesByTypeQuery) error {
for _, datasource := range s.DataSources {
typeMatch := query.Type != "" && query.Type == datasource.Type
@@ -107,6 +113,9 @@ func (s *FakeDataSourceService) GetHTTPTransport(ctx context.Context, ds *dataso
}
func (s *FakeDataSourceService) DecryptedValues(ctx context.Context, ds *datasources.DataSource) (map[string]string, error) {
+ if s.SimulatePluginFailure {
+ return nil, datasources.ErrDatasourceSecretsPluginUserFriendly{Err: "unknown error"}
+ }
values := make(map[string]string)
return values, nil
}
diff --git a/pkg/services/datasources/models.go b/pkg/services/datasources/models.go
index 68b938da257..63ff0689710 100644
--- a/pkg/services/datasources/models.go
+++ b/pkg/services/datasources/models.go
@@ -160,6 +160,10 @@ type GetDataSourcesQuery struct {
Result []*DataSource
}
+type GetAllDataSourcesQuery struct {
+ Result []*DataSource
+}
+
type GetDataSourcesByTypeQuery struct {
Type string
Result []*DataSource
diff --git a/pkg/services/datasources/service/datasource_service.go b/pkg/services/datasources/service/datasource_service.go
index 675a154b970..72fb7f6a309 100644
--- a/pkg/services/datasources/service/datasource_service.go
+++ b/pkg/services/datasources/service/datasource_service.go
@@ -15,6 +15,7 @@ import (
"github.com/grafana/grafana/pkg/components/simplejson"
"github.com/grafana/grafana/pkg/infra/httpclient"
+ "github.com/grafana/grafana/pkg/infra/log"
"github.com/grafana/grafana/pkg/services/accesscontrol"
"github.com/grafana/grafana/pkg/services/datasources"
"github.com/grafana/grafana/pkg/services/featuremgmt"
@@ -32,6 +33,7 @@ type Service struct {
features featuremgmt.FeatureToggles
permissionsService accesscontrol.DatasourcePermissionsService
ac accesscontrol.AccessControl
+ logger log.Logger
ptc proxyTransportCache
}
@@ -61,6 +63,7 @@ func ProvideService(
features: features,
permissionsService: datasourcePermissionsService,
ac: ac,
+ logger: log.New("datasources"),
}
ac.RegisterScopeAttributeResolver(NewNameScopeResolver(store))
@@ -136,6 +139,10 @@ func (s *Service) GetDataSources(ctx context.Context, query *datasources.GetData
return s.SQLStore.GetDataSources(ctx, query)
}
+func (s *Service) GetAllDataSources(ctx context.Context, query *datasources.GetAllDataSourcesQuery) error {
+ return s.SQLStore.GetAllDataSources(ctx, query)
+}
+
func (s *Service) GetDataSourcesByType(ctx context.Context, query *datasources.GetDataSourcesByTypeQuery) error {
return s.SQLStore.GetDataSourcesByType(ctx, query)
}
@@ -143,18 +150,21 @@ func (s *Service) GetDataSourcesByType(ctx context.Context, query *datasources.G
func (s *Service) AddDataSource(ctx context.Context, cmd *datasources.AddDataSourceCommand) error {
return s.SQLStore.InTransaction(ctx, func(ctx context.Context) error {
var err error
- // this is here for backwards compatibility
- cmd.EncryptedSecureJsonData, err = s.SecretsService.EncryptJsonData(ctx, cmd.SecureJsonData, secrets.WithoutScope())
- if err != nil {
- return err
- }
- secret, err := json.Marshal(cmd.SecureJsonData)
- if err != nil {
- return err
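+ // Keep writing encrypted secrets to the data source table unless the disableSecretsCompatibility flag is set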
+ cmd.EncryptedSecureJsonData = make(map[string][]byte)
+ if !s.features.IsEnabled(featuremgmt.FlagDisableSecretsCompatibility) {
+ cmd.EncryptedSecureJsonData, err = s.SecretsService.EncryptJsonData(ctx, cmd.SecureJsonData, secrets.WithoutScope())
+ if err != nil {
+ return err
+ }
}
cmd.UpdateSecretFn = func() error {
+ secret, err := json.Marshal(cmd.SecureJsonData)
+ if err != nil {
+ return err
+ }
+
return s.SecretsStore.Set(ctx, cmd.OrgId, cmd.Name, secretType, string(secret))
}
@@ -212,21 +222,22 @@ func (s *Service) UpdateDataSource(ctx context.Context, cmd *datasources.UpdateD
return err
}
- secret, err := json.Marshal(cmd.SecureJsonData)
- if err != nil {
- return err
- }
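+ // The secrets store is keyed by org id and name, so the secret update can only run when both are known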
+ if cmd.OrgId > 0 && cmd.Name != "" {
+ cmd.UpdateSecretFn = func() error {
+ secret, err := json.Marshal(cmd.SecureJsonData)
+ if err != nil {
+ return err
+ }
- cmd.UpdateSecretFn = func() error {
- var secretsErr error
- if query.Result.Name != cmd.Name {
- secretsErr = s.SecretsStore.Rename(ctx, cmd.OrgId, query.Result.Name, secretType, cmd.Name)
- }
- if secretsErr != nil {
- return secretsErr
- }
+ if query.Result.Name != cmd.Name {
+ err := s.SecretsStore.Rename(ctx, cmd.OrgId, query.Result.Name, secretType, cmd.Name)
+ if err != nil {
+ return err
+ }
+ }
- return s.SecretsStore.Set(ctx, cmd.OrgId, cmd.Name, secretType, string(secret))
+ return s.SecretsStore.Set(ctx, cmd.OrgId, cmd.Name, secretType, string(secret))
+ }
}
return s.SQLStore.UpdateDataSource(ctx, cmd)
@@ -295,10 +306,13 @@ func (s *Service) DecryptedValues(ctx context.Context, ds *datasources.DataSourc
if exist {
err = json.Unmarshal([]byte(secret), &decryptedValues)
+ if err != nil {
+ s.logger.Debug("failed to unmarshal secret value, using legacy secrets", "err", err)
+ }
}
- if (!exist || err != nil) && len(ds.SecureJsonData) > 0 {
- decryptedValues, err = s.MigrateSecrets(ctx, ds)
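+ // Fall back to decrypting the legacy secureJsonData when the unified secret is missing or unreadable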
+ if !exist || err != nil {
+ decryptedValues, err = s.decryptLegacySecrets(ctx, ds)
if err != nil {
return nil, err
}
@@ -307,7 +321,7 @@ func (s *Service) DecryptedValues(ctx context.Context, ds *datasources.DataSourc
return decryptedValues, nil
}
-func (s *Service) MigrateSecrets(ctx context.Context, ds *datasources.DataSource) (map[string]string, error) {
+func (s *Service) decryptLegacySecrets(ctx context.Context, ds *datasources.DataSource) (map[string]string, error) {
secureJsonData := make(map[string]string)
for k, v := range ds.SecureJsonData {
decrypted, err := s.SecretsService.Decrypt(ctx, v)
@@ -316,14 +330,7 @@ func (s *Service) MigrateSecrets(ctx context.Context, ds *datasources.DataSource
}
secureJsonData[k] = string(decrypted)
}
-
- jsonData, err := json.Marshal(secureJsonData)
- if err != nil {
- return nil, err
- }
-
- err = s.SecretsStore.Set(ctx, ds.OrgId, ds.Name, secretType, string(jsonData))
- return secureJsonData, err
+ return secureJsonData, nil
}
func (s *Service) DecryptedValue(ctx context.Context, ds *datasources.DataSource, key string) (string, bool, error) {
@@ -564,10 +571,12 @@ func (s *Service) fillWithSecureJSONData(ctx context.Context, cmd *datasources.U
}
}
- // this is here for backwards compatibility
- cmd.EncryptedSecureJsonData, err = s.SecretsService.EncryptJsonData(ctx, cmd.SecureJsonData, secrets.WithoutScope())
- if err != nil {
- return err
+ cmd.EncryptedSecureJsonData = make(map[string][]byte)
+ if !s.features.IsEnabled(featuremgmt.FlagDisableSecretsCompatibility) {
+ cmd.EncryptedSecureJsonData, err = s.SecretsService.EncryptJsonData(ctx, cmd.SecureJsonData, secrets.WithoutScope())
+ if err != nil {
+ return err
+ }
}
return nil
diff --git a/pkg/services/datasources/service/secrets_mig.go b/pkg/services/datasources/service/secrets_mig.go
new file mode 100644
index 00000000000..5075ee6367a
--- /dev/null
+++ b/pkg/services/datasources/service/secrets_mig.go
@@ -0,0 +1,100 @@
+package service
+
+import (
+ "context"
+
+ "github.com/grafana/grafana/pkg/infra/kvstore"
+ "github.com/grafana/grafana/pkg/services/datasources"
+ "github.com/grafana/grafana/pkg/services/featuremgmt"
+)
+
+const (
+ // Not set means migration has not happened
+ secretMigrationStatusKey = "secretMigrationStatus"
+ // Migration happened with disableSecretsCompatibility set to false
+ compatibleSecretMigrationValue = "compatible"
+ // Migration happened with disableSecretsCompatibility set to true
+ completeSecretMigrationValue = "complete"
+)
+
+type DataSourceSecretMigrationService struct {
+ dataSourcesService datasources.DataSourceService
+ kvStore *kvstore.NamespacedKVStore
+ features featuremgmt.FeatureToggles
+}
+
+func ProvideDataSourceMigrationService(
+ dataSourcesService datasources.DataSourceService,
+ kvStore kvstore.KVStore,
+ features featuremgmt.FeatureToggles,
+) *DataSourceSecretMigrationService {
+ return &DataSourceSecretMigrationService{
+ dataSourcesService: dataSourcesService,
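+ // migration status is tracked once, globally, hence org 0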
+ kvStore: kvstore.WithNamespace(kvStore, 0, secretType),
+ features: features,
+ }
+}
+
+func (s *DataSourceSecretMigrationService) Migrate(ctx context.Context) error {
+ migrationStatus, _, err := s.kvStore.Get(ctx, secretMigrationStatusKey)
+ if err != nil {
+ return err
+ }
+
+ // If this flag is true, delete secrets from the legacy secrets store as they are migrated
+ disableSecretsCompatibility := s.features.IsEnabled(featuremgmt.FlagDisableSecretsCompatibility)
+ // If migration hasn't happened, migrate to unified secrets and keep a copy in the legacy store
+ // If a complete migration happened and backwards compatibility is now enabled, copy secrets back to legacy
+ needCompatibility := migrationStatus != compatibleSecretMigrationValue && !disableSecretsCompatibility
+ // If migration hasn't happened, migrate to unified secrets and delete them from the legacy store
+ // If a compatible migration happened and compatibility is now disabled, delete secrets from legacy
+ needMigration := migrationStatus != completeSecretMigrationValue && disableSecretsCompatibility
+
+ if needCompatibility || needMigration {
+ query := &datasources.GetAllDataSourcesQuery{}
+ err := s.dataSourcesService.GetAllDataSources(ctx, query)
+ if err != nil {
+ return err
+ }
+
+ for _, ds := range query.Result {
+ secureJsonData, err := s.dataSourcesService.DecryptedValues(ctx, ds)
+ if err != nil {
+ return err
+ }
+
+ // Secrets are set by the update data source function if the SecureJsonData is set in the command
+ // Secrets are deleted by the update data source function if the disableSecretsCompatibility flag is enabled
+ err = s.dataSourcesService.UpdateDataSource(ctx, &datasources.UpdateDataSourceCommand{
+ Id: ds.Id,
+ OrgId: ds.OrgId,
+ Uid: ds.Uid,
+ Name: ds.Name,
+ JsonData: ds.JsonData,
+ SecureJsonData: secureJsonData,
+
+ // These are needed by the SQL function due to UseBool and MustCols
+ IsDefault: ds.IsDefault,
+ BasicAuth: ds.BasicAuth,
+ WithCredentials: ds.WithCredentials,
+ ReadOnly: ds.ReadOnly,
+ User: ds.User,
+ })
+ if err != nil {
+ return err
+ }
+ }
+
+ if disableSecretsCompatibility {
+ err = s.kvStore.Set(ctx, secretMigrationStatusKey, completeSecretMigrationValue)
+ } else {
+ err = s.kvStore.Set(ctx, secretMigrationStatusKey, compatibleSecretMigrationValue)
+ }
+
+ if err != nil {
+ return err
+ }
+ }
+
+ return nil
+}
diff --git a/pkg/services/datasources/service/secrets_mig_test.go b/pkg/services/datasources/service/secrets_mig_test.go
new file mode 100644
index 00000000000..3c9054dd895
--- /dev/null
+++ b/pkg/services/datasources/service/secrets_mig_test.go
@@ -0,0 +1,340 @@
+package service
+
+import (
+ "context"
+ "testing"
+
+ "github.com/grafana/grafana/pkg/infra/kvstore"
+ acmock "github.com/grafana/grafana/pkg/services/accesscontrol/mock"
+ "github.com/grafana/grafana/pkg/services/datasources"
+ "github.com/grafana/grafana/pkg/services/featuremgmt"
+ "github.com/grafana/grafana/pkg/services/secrets/fakes"
+ secretsStore "github.com/grafana/grafana/pkg/services/secrets/kvstore"
+ secretsManager "github.com/grafana/grafana/pkg/services/secrets/manager"
+ "github.com/grafana/grafana/pkg/services/sqlstore"
+ "github.com/grafana/grafana/pkg/setting"
+ "github.com/stretchr/testify/assert"
+)
+
+func SetupTestMigrationService(t *testing.T, sqlStore *sqlstore.SQLStore, kvStore kvstore.KVStore, secretsStore secretsStore.SecretsKVStore, compatibility bool) *DataSourceSecretMigrationService {
+ t.Helper()
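+ // compatibility=false enables the disableSecretsCompatibility feature flag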
+ cfg := &setting.Cfg{}
+ features := featuremgmt.WithFeatures()
+ if !compatibility {
+ features = featuremgmt.WithFeatures(featuremgmt.FlagDisableSecretsCompatibility, true)
+ }
+ secretsService := secretsManager.SetupTestService(t, fakes.NewFakeSecretsStore())
+ dsService := ProvideService(sqlStore, secretsService, secretsStore, cfg, features, acmock.New().WithDisabled(), acmock.NewMockedPermissionsService())
+ migService := ProvideDataSourceMigrationService(dsService, kvStore, features)
+ return migService
+}
+
+func TestMigrate(t *testing.T) {
+ t.Run("should migrate from legacy to unified without compatibility", func(t *testing.T) {
+ sqlStore := sqlstore.InitTestDB(t)
+ kvStore := kvstore.ProvideService(sqlStore)
+ secretsStore := secretsStore.SetupTestService(t)
+ migService := SetupTestMigrationService(t, sqlStore, kvStore, secretsStore, false)
+
+ dataSourceName := "Test"
+ dataSourceOrg := int64(1)
+
+ // Add test data source
+ err := sqlStore.AddDataSource(context.Background(), &datasources.AddDataSourceCommand{
+ OrgId: dataSourceOrg,
+ Name: dataSourceName,
+ Type: datasources.DS_MYSQL,
+ Access: datasources.DS_ACCESS_DIRECT,
+ Url: "http://test",
+ EncryptedSecureJsonData: map[string][]byte{
+ "password": []byte("9f86d081884c7d659a2feaa0c55ad015a3bf4f1b2b0b822cd15d6c15b0f00a08"),
+ },
+ })
+ assert.NoError(t, err)
+
+ // Check if the secret json data was added
+ query := &datasources.GetDataSourceQuery{OrgId: dataSourceOrg, Name: dataSourceName}
+ err = sqlStore.GetDataSource(context.Background(), query)
+ assert.NoError(t, err)
+ assert.NotNil(t, query.Result)
+ assert.NotEmpty(t, query.Result.SecureJsonData)
+
+ // Check if the migration status key is empty
+ value, exist, err := kvStore.Get(context.Background(), 0, secretType, secretMigrationStatusKey)
+ assert.NoError(t, err)
+ assert.Empty(t, value)
+ assert.False(t, exist)
+
+ // Check that the secret is not present on the secret store
+ value, exist, err = secretsStore.Get(context.Background(), dataSourceOrg, dataSourceName, secretType)
+ assert.NoError(t, err)
+ assert.Empty(t, value)
+ assert.False(t, exist)
+
+ // Run the migration
+ err = migService.Migrate(context.Background())
+ assert.NoError(t, err)
+
+ // Check if the secure json data was deleted
+ query = &datasources.GetDataSourceQuery{OrgId: dataSourceOrg, Name: dataSourceName}
+ err = sqlStore.GetDataSource(context.Background(), query)
+ assert.NoError(t, err)
+ assert.NotNil(t, query.Result)
+ assert.Empty(t, query.Result.SecureJsonData)
+
+ // Check if the secret was added to the secret store
+ value, exist, err = secretsStore.Get(context.Background(), dataSourceOrg, dataSourceName, secretType)
+ assert.NoError(t, err)
+ assert.NotEmpty(t, value)
+ assert.True(t, exist)
+
+ // Check if the migration status key was set
+ value, exist, err = kvStore.Get(context.Background(), 0, secretType, secretMigrationStatusKey)
+ assert.NoError(t, err)
+ assert.Equal(t, completeSecretMigrationValue, value)
+ assert.True(t, exist)
+ })
+
+ t.Run("should migrate from legacy to unified with compatibility", func(t *testing.T) {
+ sqlStore := sqlstore.InitTestDB(t)
+ kvStore := kvstore.ProvideService(sqlStore)
+ secretsStore := secretsStore.SetupTestService(t)
+ migService := SetupTestMigrationService(t, sqlStore, kvStore, secretsStore, true)
+
+ dataSourceName := "Test"
+ dataSourceOrg := int64(1)
+
+ // Add test data source
+ err := sqlStore.AddDataSource(context.Background(), &datasources.AddDataSourceCommand{
+ OrgId: dataSourceOrg,
+ Name: dataSourceName,
+ Type: datasources.DS_MYSQL,
+ Access: datasources.DS_ACCESS_DIRECT,
+ Url: "http://test",
+ EncryptedSecureJsonData: map[string][]byte{
+ "password": []byte("9f86d081884c7d659a2feaa0c55ad015a3bf4f1b2b0b822cd15d6c15b0f00a08"),
+ },
+ })
+ assert.NoError(t, err)
+
+ // Check if the secret json data was added
+ query := &datasources.GetDataSourceQuery{OrgId: dataSourceOrg, Name: dataSourceName}
+ err = sqlStore.GetDataSource(context.Background(), query)
+ assert.NoError(t, err)
+ assert.NotNil(t, query.Result)
+ assert.NotEmpty(t, query.Result.SecureJsonData)
+
+ // Check if the migration status key is empty
+ value, exist, err := kvStore.Get(context.Background(), 0, secretType, secretMigrationStatusKey)
+ assert.NoError(t, err)
+ assert.Empty(t, value)
+ assert.False(t, exist)
+
+ // Check that the secret is not present on the secret store
+ value, exist, err = secretsStore.Get(context.Background(), dataSourceOrg, dataSourceName, secretType)
+ assert.NoError(t, err)
+ assert.Empty(t, value)
+ assert.False(t, exist)
+
+ // Run the migration
+ err = migService.Migrate(context.Background())
+ assert.NoError(t, err)
+
+ // Check if the secure json data was maintained for compatibility
+ query = &datasources.GetDataSourceQuery{OrgId: dataSourceOrg, Name: dataSourceName}
+ err = sqlStore.GetDataSource(context.Background(), query)
+ assert.NoError(t, err)
+ assert.NotNil(t, query.Result)
+ assert.NotEmpty(t, query.Result.SecureJsonData)
+
+ // Check if the secret was added to the secret store
+ value, exist, err = secretsStore.Get(context.Background(), dataSourceOrg, dataSourceName, secretType)
+ assert.NoError(t, err)
+ assert.NotEmpty(t, value)
+ assert.True(t, exist)
+
+ // Check if the migration status key was set
+ value, exist, err = kvStore.Get(context.Background(), 0, secretType, secretMigrationStatusKey)
+ assert.NoError(t, err)
+ assert.Equal(t, compatibleSecretMigrationValue, value)
+ assert.True(t, exist)
+ })
+
+ t.Run("should replicate from unified to legacy for compatibility", func(t *testing.T) {
+ sqlStore := sqlstore.InitTestDB(t)
+ kvStore := kvstore.ProvideService(sqlStore)
+ secretsStore := secretsStore.SetupTestService(t)
+ migService := SetupTestMigrationService(t, sqlStore, kvStore, secretsStore, false)
+
+ dataSourceName := "Test"
+ dataSourceOrg := int64(1)
+
+ // Add test data source
+ err := sqlStore.AddDataSource(context.Background(), &datasources.AddDataSourceCommand{
+ OrgId: dataSourceOrg,
+ Name: dataSourceName,
+ Type: datasources.DS_MYSQL,
+ Access: datasources.DS_ACCESS_DIRECT,
+ Url: "http://test",
+ EncryptedSecureJsonData: map[string][]byte{
+ "password": []byte("9f86d081884c7d659a2feaa0c55ad015a3bf4f1b2b0b822cd15d6c15b0f00a08"),
+ },
+ })
+ assert.NoError(t, err)
+
+ // Check if the secret json data was added
+ query := &datasources.GetDataSourceQuery{OrgId: dataSourceOrg, Name: dataSourceName}
+ err = sqlStore.GetDataSource(context.Background(), query)
+ assert.NoError(t, err)
+ assert.NotNil(t, query.Result)
+ assert.NotEmpty(t, query.Result.SecureJsonData)
+
+ // Check if the migration status key is empty
+ value, exist, err := kvStore.Get(context.Background(), 0, secretType, secretMigrationStatusKey)
+ assert.NoError(t, err)
+ assert.Empty(t, value)
+ assert.False(t, exist)
+
+ // Check that the secret is not present on the secret store
+ value, exist, err = secretsStore.Get(context.Background(), dataSourceOrg, dataSourceName, secretType)
+ assert.NoError(t, err)
+ assert.Empty(t, value)
+ assert.False(t, exist)
+
+ // Run the migration without compatibility
+ err = migService.Migrate(context.Background())
+ assert.NoError(t, err)
+
+ // Check if the secure json data was deleted
+ query = &datasources.GetDataSourceQuery{OrgId: dataSourceOrg, Name: dataSourceName}
+ err = sqlStore.GetDataSource(context.Background(), query)
+ assert.NoError(t, err)
+ assert.NotNil(t, query.Result)
+ assert.Empty(t, query.Result.SecureJsonData)
+
+ // Check if the secret was added to the secret store
+ value, exist, err = secretsStore.Get(context.Background(), dataSourceOrg, dataSourceName, secretType)
+ assert.NoError(t, err)
+ assert.NotEmpty(t, value)
+ assert.True(t, exist)
+
+ // Check if the migration status key was set
+ value, exist, err = kvStore.Get(context.Background(), 0, secretType, secretMigrationStatusKey)
+ assert.NoError(t, err)
+ assert.Equal(t, completeSecretMigrationValue, value)
+ assert.True(t, exist)
+
+ // Run the migration with compatibility
+ migService = SetupTestMigrationService(t, sqlStore, kvStore, secretsStore, true)
+ err = migService.Migrate(context.Background())
+ assert.NoError(t, err)
+
+ // Check if the secure json data was re-added for compatibility
+ query = &datasources.GetDataSourceQuery{OrgId: dataSourceOrg, Name: dataSourceName}
+ err = sqlStore.GetDataSource(context.Background(), query)
+ assert.NoError(t, err)
+ assert.NotNil(t, query.Result)
+ assert.NotEmpty(t, query.Result.SecureJsonData)
+
+ // Check that the secret is still present in the secret store
+ value, exist, err = secretsStore.Get(context.Background(), dataSourceOrg, dataSourceName, secretType)
+ assert.NoError(t, err)
+ assert.NotEmpty(t, value)
+ assert.True(t, exist)
+
+ // Check if the migration status key was set
+ value, exist, err = kvStore.Get(context.Background(), 0, secretType, secretMigrationStatusKey)
+ assert.NoError(t, err)
+ assert.Equal(t, compatibleSecretMigrationValue, value)
+ assert.True(t, exist)
+ })
+
+ t.Run("should delete from legacy to remove compatibility", func(t *testing.T) {
+ sqlStore := sqlstore.InitTestDB(t)
+ kvStore := kvstore.ProvideService(sqlStore)
+ secretsStore := secretsStore.SetupTestService(t)
+ migService := SetupTestMigrationService(t, sqlStore, kvStore, secretsStore, true)
+
+ dataSourceName := "Test"
+ dataSourceOrg := int64(1)
+
+ // Add test data source
+ err := sqlStore.AddDataSource(context.Background(), &datasources.AddDataSourceCommand{
+ OrgId: dataSourceOrg,
+ Name: dataSourceName,
+ Type: datasources.DS_MYSQL,
+ Access: datasources.DS_ACCESS_DIRECT,
+ Url: "http://test",
+ EncryptedSecureJsonData: map[string][]byte{
+ "password": []byte("9f86d081884c7d659a2feaa0c55ad015a3bf4f1b2b0b822cd15d6c15b0f00a08"),
+ },
+ })
+ assert.NoError(t, err)
+
+ // Check if the secret json data was added
+ query := &datasources.GetDataSourceQuery{OrgId: dataSourceOrg, Name: dataSourceName}
+ err = sqlStore.GetDataSource(context.Background(), query)
+ assert.NoError(t, err)
+ assert.NotNil(t, query.Result)
+ assert.NotEmpty(t, query.Result.SecureJsonData)
+
+ // Check if the migration status key is empty
+ value, exist, err := kvStore.Get(context.Background(), 0, secretType, secretMigrationStatusKey)
+ assert.NoError(t, err)
+ assert.Empty(t, value)
+ assert.False(t, exist)
+
+ // Check that the secret is not present on the secret store
+ value, exist, err = secretsStore.Get(context.Background(), dataSourceOrg, dataSourceName, secretType)
+ assert.NoError(t, err)
+ assert.Empty(t, value)
+ assert.False(t, exist)
+
+ // Run the migration with compatibility
+ err = migService.Migrate(context.Background())
+ assert.NoError(t, err)
+
+ // Check if the secure json data was maintained for compatibility
+ query = &datasources.GetDataSourceQuery{OrgId: dataSourceOrg, Name: dataSourceName}
+ err = sqlStore.GetDataSource(context.Background(), query)
+ assert.NoError(t, err)
+ assert.NotNil(t, query.Result)
+ assert.NotEmpty(t, query.Result.SecureJsonData)
+
+ // Check if the secret was added to the secret store
+ value, exist, err = secretsStore.Get(context.Background(), dataSourceOrg, dataSourceName, secretType)
+ assert.NoError(t, err)
+ assert.NotEmpty(t, value)
+ assert.True(t, exist)
+
+ // Check if the migration status key was set
+ value, exist, err = kvStore.Get(context.Background(), 0, secretType, secretMigrationStatusKey)
+ assert.NoError(t, err)
+ assert.Equal(t, compatibleSecretMigrationValue, value)
+ assert.True(t, exist)
+
+ // Run the migration without compatibility
+ migService = SetupTestMigrationService(t, sqlStore, kvStore, secretsStore, false)
+ err = migService.Migrate(context.Background())
+ assert.NoError(t, err)
+
+ // Check if the secure json data was deleted
+ query = &datasources.GetDataSourceQuery{OrgId: dataSourceOrg, Name: dataSourceName}
+ err = sqlStore.GetDataSource(context.Background(), query)
+ assert.NoError(t, err)
+ assert.NotNil(t, query.Result)
+ assert.Empty(t, query.Result.SecureJsonData)
+
+ // Check that the secret is still present in the secret store
+ value, exist, err = secretsStore.Get(context.Background(), dataSourceOrg, dataSourceName, secretType)
+ assert.NoError(t, err)
+ assert.NotEmpty(t, value)
+ assert.True(t, exist)
+
+ // Check if the migration status key was set
+ value, exist, err = kvStore.Get(context.Background(), 0, secretType, secretMigrationStatusKey)
+ assert.NoError(t, err)
+ assert.Equal(t, completeSecretMigrationValue, value)
+ assert.True(t, exist)
+ })
+}
diff --git a/pkg/services/export/commit_helper.go b/pkg/services/export/commit_helper.go
index ae5d05edfe2..5848db9dd08 100644
--- a/pkg/services/export/commit_helper.go
+++ b/pkg/services/export/commit_helper.go
@@ -26,6 +26,7 @@ type commitHelper struct {
users map[int64]*userInfo
stopRequested bool
broadcast func(path string)
+ exporter string // key for the current exporter
}
type commitBody struct {
diff --git a/pkg/services/export/export_alerts.go b/pkg/services/export/export_alerts.go
new file mode 100644
index 00000000000..2ba0b10cf88
--- /dev/null
+++ b/pkg/services/export/export_alerts.go
@@ -0,0 +1,51 @@
+package export
+
+import (
+ "encoding/json"
+ "fmt"
+ "path"
+ "time"
+
+ "github.com/grafana/grafana/pkg/services/sqlstore"
+)
+
+func exportAlerts(helper *commitHelper, job *gitExportJob) error {
+ alertDir := path.Join(helper.orgDir, "alerts")
+
+ return job.sql.WithDbSession(helper.ctx, func(sess *sqlstore.DBSession) error {
+ type ruleResult struct {
+ Title string `xorm:"title"`
+ UID string `xorm:"uid"`
+ NamespaceUID string `xorm:"namespace_uid"`
+ RuleGroup string `xorm:"rule_group"`
+ Condition json.RawMessage `xorm:"data"`
+ DashboardUID string `xorm:"dashboard_uid"`
+ PanelID int64 `xorm:"panel_id"`
+ Updated time.Time `xorm:"updated" json:"-"`
+ }
+
+ rows := make([]*ruleResult, 0)
+
+ sess.Table("alert_rule").Where("org_id = ?", helper.orgID)
+
+ err := sess.Find(&rows)
+ if err != nil {
+ return err
+ }
+
+ for _, row := range rows {
+ err = helper.add(commitOptions{
+ body: []commitBody{{
+ body: prettyJSON(row),
+ fpath: path.Join(alertDir, row.UID) + ".json", // must be JSON files
+ }},
+ comment: fmt.Sprintf("Alert: %s", row.Title),
+ when: row.Updated,
+ })
+ if err != nil {
+ return err
+ }
+ }
+ return err
+ })
+}
diff --git a/pkg/services/export/export_anno.go b/pkg/services/export/export_anno.go
index 4f3d0f96684..aa07d025170 100644
--- a/pkg/services/export/export_anno.go
+++ b/pkg/services/export/export_anno.go
@@ -105,24 +105,26 @@ func exportAnnotations(helper *commitHelper, job *gitExportJob) error {
}
}
- frame := data.NewFrame("", f_ID, f_DashboardID, f_PanelID, f_Epoch, f_EpochEnd, f_Text, f_Tags)
- js, err := jsoniter.ConfigCompatibleWithStandardLibrary.MarshalIndent(frame, "", " ")
- if err != nil {
- return err
- }
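+ // Skip the commit entirely when no annotations were found, so no empty files are written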
+ if f_ID.Len() > 0 {
+ frame := data.NewFrame("", f_ID, f_DashboardID, f_PanelID, f_Epoch, f_EpochEnd, f_Text, f_Tags)
+ js, err := jsoniter.ConfigCompatibleWithStandardLibrary.MarshalIndent(frame, "", " ")
+ if err != nil {
+ return err
+ }
- err = helper.add(commitOptions{
- body: []commitBody{
- {
- fpath: filepath.Join(helper.orgDir, "annotations", "annotations.json"),
- body: js, // TODO, pretty?
+ err = helper.add(commitOptions{
+ body: []commitBody{
+ {
+ fpath: filepath.Join(helper.orgDir, "annotations", "annotations.json"),
+ body: js, // TODO, pretty?
+ },
},
- },
- when: time.Now(),
- comment: "Exported annotations",
- })
- if err != nil {
- return err
+ when: time.Now(),
+ comment: "Exported annotations",
+ })
+ if err != nil {
+ return err
+ }
}
return err
})
diff --git a/pkg/services/export/export_auth.go b/pkg/services/export/export_auth.go
index 37da1a92d63..8b345f42788 100644
--- a/pkg/services/export/export_auth.go
+++ b/pkg/services/export/export_auth.go
@@ -1,9 +1,12 @@
package export
import (
- "fmt"
"path"
+ "strconv"
+ "strings"
+ "github.com/grafana/grafana-plugin-sdk-go/data"
+ "github.com/grafana/grafana-plugin-sdk-go/data/sqlutil"
"github.com/grafana/grafana/pkg/services/sqlstore"
)
@@ -13,58 +16,119 @@ func dumpAuthTables(helper *commitHelper, job *gitExportJob) error {
comment: "auth tables dump",
}
- tables := []string{
- "user", // joined with "org_user" to get the role
- "user_role",
- "builtin_role",
- "api_key",
- "team", "team_group", "team_role", "team_member",
- "role",
- "temp_user",
- "user_auth_token", // no org_id... is it temporary?
- "permission",
+ type dumpTable struct {
+ table string
+ sql string
+ converters []sqlutil.Converter
+ drop []string
}
- for _, table := range tables {
- switch table {
- case "permission":
- sess.Table(table).
- Join("left", "role", "permission.role_id = role.id").
- Cols("permission.*").
- Where("org_id = ?", helper.orgID).
- Asc("permission.id")
- case "user":
- sess.Table(table).
- Join("inner", "org_user", "user.id = org_user.user_id").
- Cols("user.*", "org_user.role").
- Where("org_user.org_id = ?", helper.orgID).
- Asc("user.id")
- case "user_auth_token":
- sess.Table(table).
- Join("inner", "org_user", "user_auth_token.id = org_user.user_id").
- Cols("user_auth_token.*").
- Where("org_user.org_id = ?", helper.orgID).
- Asc("user_auth_token.id")
- default:
- sess.Table(table).Where("org_id = ?", helper.orgID).Asc("id")
+ dump := []dumpTable{
+ {
+ table: "user",
+ sql: `
+ SELECT user.*, org_user.role
+ FROM user
+ JOIN org_user ON user.id = org_user.user_id
+ WHERE org_user.org_id =` + strconv.FormatInt(helper.orgID, 10),
+ converters: []sqlutil.Converter{{Dynamic: true}},
+ drop: []string{
+ "id", "version",
+ "password", // UMMMMM... for now
+ "org_id",
+ },
+ },
+ {
+ table: "user_role",
+ sql: `
+ SELECT * FROM user_role
+ WHERE org_id =` + strconv.FormatInt(helper.orgID, 10),
+ },
+ {
+ table: "builtin_role",
+ sql: `
+ SELECT * FROM builtin_role
+ WHERE org_id =` + strconv.FormatInt(helper.orgID, 10),
+ },
+ {
+ table: "api_key",
+ sql: `
+ SELECT * FROM api_key
+ WHERE org_id =` + strconv.FormatInt(helper.orgID, 10),
+ },
+ {
+ table: "permission",
+ sql: `
+ SELECT permission.*
+ FROM permission
+ JOIN role ON permission.role_id = role.id
+ WHERE org_id =` + strconv.FormatInt(helper.orgID, 10),
+ },
+ {
+ table: "user_auth_token",
+ sql: `
+ SELECT user_auth_token.*
+ FROM user_auth_token
+ JOIN org_user ON user_auth_token.id = org_user.user_id
+ WHERE org_user.org_id =` + strconv.FormatInt(helper.orgID, 10),
+ },
+ {table: "team"},
+ {table: "team_group"},
+ {table: "team_role"},
+ {table: "team_member"},
+ {table: "temp_user"},
+ {table: "role"},
+ }
+
+ for _, auth := range dump {
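+ // Fill in per-table defaults: an org-scoped SELECT *, dynamic converters, and dropping of id/org_id columns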
+ if auth.sql == "" {
+ auth.sql = `
+ SELECT * FROM ` + auth.table + `
+ WHERE org_id =` + strconv.FormatInt(helper.orgID, 10)
+ }
+ if auth.converters == nil {
+ auth.converters = []sqlutil.Converter{{Dynamic: true}}
+ }
+ if auth.drop == nil {
+ auth.drop = []string{
+ "id",
+ "org_id",
+ }
}
- raw, err := sess.QueryInterface()
+ rows, err := sess.DB().QueryContext(helper.ctx, auth.sql)
if err != nil {
- return fmt.Errorf("unable to read: %s // %s", table, err.Error())
- }
- if len(raw) < 1 {
- continue // don't write empty files
+ if strings.HasPrefix(err.Error(), "no such table") {
+ continue
+ }
+ return err
}
- frame, err := queryResultToDataFrame(raw, frameOpts{
- skip: []string{"org_id", "version", "help_flags1", "theme"},
- })
+
+ frame, err := sqlutil.FrameFromRows(rows.Rows, -1, auth.converters...)
if err != nil {
return err
}
- frame.Name = table
+ if frame.Fields[0].Len() < 1 {
+ continue // do not write empty structures
+ }
+
+ if len(auth.drop) > 0 {
+ lookup := make(map[string]bool, len(auth.drop))
+ for _, v := range auth.drop {
+ lookup[v] = true
+ }
+ fields := make([]*data.Field, 0, len(frame.Fields))
+ for _, f := range frame.Fields {
+ if lookup[f.Name] {
+ continue
+ }
+ fields = append(fields, f)
+ }
+ frame.Fields = fields
+ }
+ frame.Name = auth.table
commit.body = append(commit.body, commitBody{
- fpath: path.Join(helper.orgDir, "auth", "sql.dump", table+".json"),
+ fpath: path.Join(helper.orgDir, "auth", "sql.dump", auth.table+".json"),
frame: frame,
})
}
diff --git a/pkg/services/export/export_dashboards.go b/pkg/services/export/export_dash.go
similarity index 98%
rename from pkg/services/export/export_dashboards.go
rename to pkg/services/export/export_dash.go
index b8deeb4a6f3..fc5190f1f25 100644
--- a/pkg/services/export/export_dashboards.go
+++ b/pkg/services/export/export_dash.go
@@ -151,7 +151,7 @@ func exportDashboards(helper *commitHelper, job *gitExportJob) error {
if job.cfg.KeepHistory {
sess.Table("dashboard_version").
Join("INNER", "dashboard", "dashboard.id = dashboard_version.dashboard_id").
- Where("org_id = ?", job.orgID).
+ Where("org_id = ?", helper.orgID).
Cols("dashboard.id",
"dashboard_version.version",
"dashboard_version.created",
@@ -161,7 +161,7 @@ func exportDashboards(helper *commitHelper, job *gitExportJob) error {
Asc("dashboard_version.created")
} else {
sess.Table("dashboard").
- Where("org_id = ?", job.orgID).
+ Where("org_id = ?", helper.orgID).
Cols("id",
"version",
"created",
diff --git a/pkg/services/export/export_dash_thumbs.go b/pkg/services/export/export_dash_thumbs.go
new file mode 100644
index 00000000000..fe3143cb01b
--- /dev/null
+++ b/pkg/services/export/export_dash_thumbs.go
@@ -0,0 +1,80 @@
+package export
+
+import (
+ "encoding/json"
+ "fmt"
+ "io/ioutil"
+ "path/filepath"
+ "strings"
+ "time"
+
+ "github.com/grafana/grafana/pkg/services/sqlstore"
+)
+
+func exportDashboardThumbnails(helper *commitHelper, job *gitExportJob) error {
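+ // root-alias.json is written by the dashboard exporter and maps dashboard UIDs to exported file paths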
+ alias := make(map[string]string, 100)
+ aliasLookup, err := ioutil.ReadFile(filepath.Join(helper.orgDir, "root-alias.json"))
+ if err != nil {
+ return fmt.Errorf("missing dashboard alias files (must export dashboards first)")
+ }
+ err = json.Unmarshal(aliasLookup, &alias)
+ if err != nil {
+ return err
+ }
+
+ return job.sql.WithDbSession(helper.ctx, func(sess *sqlstore.DBSession) error {
+ type dashboardThumb struct {
+ UID string `xorm:"uid"`
+ Image []byte `xorm:"image"`
+ Theme string `xorm:"theme"`
+ Kind string `xorm:"kind"`
+ MimeType string `xorm:"mime_type"`
+ Updated time.Time
+ }
+
+ rows := make([]*dashboardThumb, 0)
+
+ // SELECT uid,image,theme,kind,mime_type,dashboard_thumbnail.updated
+ // FROM dashboard_thumbnail
+ // JOIN dashboard ON dashboard.id = dashboard_thumbnail.dashboard_id
+ // WHERE org_id = 2; //dashboard.uid = '2VVbg06nz';
+
+ sess.Table("dashboard_thumbnail").
+ Join("INNER", "dashboard", "dashboard.id = dashboard_thumbnail.dashboard_id").
+ Cols("uid", "image", "theme", "kind", "mime_type", "dashboard_thumbnail.updated").
+ Where("dashboard.org_id = ?", helper.orgID)
+
+ err := sess.Find(&rows)
+ if err != nil {
+ if strings.HasPrefix(err.Error(), "no such table") {
+ return nil
+ }
+ return err
+ }
+
+ // Process all thumbnail rows
+ for _, row := range rows {
+ p, ok := alias[row.UID]
+ if !ok {
+ p = "uid/" + row.UID
+ } else {
+ p = strings.TrimSuffix(p, "-dash.json")
+ }
+
+ err := helper.add(commitOptions{
+ body: []commitBody{
+ {
+ fpath: filepath.Join(helper.orgDir, "thumbs", fmt.Sprintf("%s.thumb-%s.png", p, row.Theme)),
+ body: row.Image,
+ },
+ },
+ when: row.Updated,
+ comment: "Thumbnail",
+ })
+ if err != nil {
+ return err
+ }
+ }
+ return nil
+ })
+}
diff --git a/pkg/services/export/export_ds.go b/pkg/services/export/export_ds.go
index e1aacd524aa..c7b9e5ca49c 100644
--- a/pkg/services/export/export_ds.go
+++ b/pkg/services/export/export_ds.go
@@ -10,7 +10,7 @@ import (
func exportDataSources(helper *commitHelper, job *gitExportJob) error {
cmd := &datasources.GetDataSourcesQuery{
- OrgId: job.orgID,
+ OrgId: helper.orgID,
}
err := job.sql.GetDataSources(helper.ctx, cmd)
if err != nil {
diff --git a/pkg/services/export/export_live.go b/pkg/services/export/export_live.go
index 352ce24f56a..ab8ba408087 100644
--- a/pkg/services/export/export_live.go
+++ b/pkg/services/export/export_live.go
@@ -3,6 +3,7 @@ package export
import (
"fmt"
"path"
+ "strings"
"time"
"github.com/grafana/grafana/pkg/services/sqlstore"
@@ -25,6 +26,9 @@ func exportLive(helper *commitHelper, job *gitExportJob) error {
err := sess.Find(&rows)
if err != nil {
+ if strings.HasPrefix(err.Error(), "no such table") {
+ return nil
+ }
return err
}
diff --git a/pkg/services/export/export_plugins.go b/pkg/services/export/export_plugins.go
new file mode 100644
index 00000000000..47a03910269
--- /dev/null
+++ b/pkg/services/export/export_plugins.go
@@ -0,0 +1,53 @@
+package export
+
+import (
+ "encoding/json"
+ "fmt"
+ "path"
+ "strings"
+ "time"
+
+ "github.com/grafana/grafana/pkg/services/sqlstore"
+)
+
+func exportPlugins(helper *commitHelper, job *gitExportJob) error {
+ return job.sql.WithDbSession(helper.ctx, func(sess *sqlstore.DBSession) error {
+ type pResult struct {
+ PluginID string `xorm:"plugin_id" json:"-"`
+ Enabled string `xorm:"enabled" json:"enabled"`
+ Pinned string `xorm:"pinned" json:"pinned"`
+ JSONData json.RawMessage `xorm:"json_data" json:"json_data,omitempty"`
+ // TODO: secure!!!!
+ PluginVersion string `xorm:"plugin_version" json:"version"`
+ Created time.Time `xorm:"created" json:"created"`
+ Updated time.Time `xorm:"updated" json:"updated"`
+ }
+
+ rows := make([]*pResult, 0)
+
+ sess.Table("plugin_setting").Where("org_id = ?", helper.orgID)
+
+ err := sess.Find(&rows)
+ if err != nil {
+ if strings.HasPrefix(err.Error(), "no such table") {
+ return nil
+ }
+ return err
+ }
+
+ for _, row := range rows {
+ err = helper.add(commitOptions{
+ body: []commitBody{{
+ body: prettyJSON(row),
+ fpath: path.Join(helper.orgDir, "plugins", row.PluginID, "settings.json"),
+ }},
+ comment: fmt.Sprintf("Plugin: %s", row.PluginID),
+ when: row.Updated,
+ })
+ if err != nil {
+ return err
+ }
+ }
+ return err
+ })
+}
diff --git a/pkg/services/export/export_snapshots.go b/pkg/services/export/export_snapshots.go
index fa2e05ce588..24195a23693 100644
--- a/pkg/services/export/export_snapshots.go
+++ b/pkg/services/export/export_snapshots.go
@@ -10,7 +10,7 @@ import (
func exportSnapshots(helper *commitHelper, job *gitExportJob) error {
cmd := &dashboardsnapshots.GetDashboardSnapshotsQuery{
- OrgId: job.orgID,
+ OrgId: helper.orgID,
Limit: 500000,
SignedInUser: nil,
}
diff --git a/pkg/services/export/export_sys_playlists.go b/pkg/services/export/export_sys_playlists.go
index 83943db9563..b2a441e8c7a 100644
--- a/pkg/services/export/export_sys_playlists.go
+++ b/pkg/services/export/export_sys_playlists.go
@@ -10,7 +10,7 @@ import (
func exportSystemPlaylists(helper *commitHelper, job *gitExportJob) error {
cmd := &models.GetPlaylistsQuery{
- OrgId: job.orgID,
+ OrgId: helper.orgID,
Limit: 500000,
}
err := job.sql.SearchPlaylists(helper.ctx, cmd)
diff --git a/pkg/services/export/export_sys_short_url.go b/pkg/services/export/export_sys_short_url.go
new file mode 100644
index 00000000000..3206e825122
--- /dev/null
+++ b/pkg/services/export/export_sys_short_url.go
@@ -0,0 +1,72 @@
+package export
+
+import (
+ "fmt"
+ "path/filepath"
+ "time"
+
+ "github.com/grafana/grafana/pkg/services/sqlstore"
+)
+
+func exportSystemShortURL(helper *commitHelper, job *gitExportJob) error {
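+ // Track last-seen timestamps separately; they are written as one file in a single commit at the end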
+ mostRecent := int64(0)
+ lastSeen := make(map[string]int64, 50)
+ dir := filepath.Join(helper.orgDir, "system", "short_url")
+
+ err := job.sql.WithDbSession(helper.ctx, func(sess *sqlstore.DBSession) error {
+ type urlResult struct {
+ UID string `xorm:"uid" json:"-"`
+ Path string `xorm:"path" json:"path"`
+ CreatedBy int64 `xorm:"created_by" json:"-"`
+ CreatedAt time.Time `xorm:"created_at" json:"-"`
+ LastSeenAt int64 `xorm:"last_seen_at" json:"-"`
+ }
+
+ rows := make([]*urlResult, 0)
+
+ sess.Table("short_url").Where("org_id = ?", helper.orgID)
+
+ err := sess.Find(&rows)
+ if err != nil {
+ return err
+ }
+
+ for _, row := range rows {
+ if row.LastSeenAt > 0 {
+ lastSeen[row.UID] = row.LastSeenAt
+ if mostRecent < row.LastSeenAt {
+ mostRecent = row.LastSeenAt
+ }
+ }
+ err := helper.add(commitOptions{
+ body: []commitBody{
+ {
+ fpath: filepath.Join(dir, "uid", fmt.Sprintf("%s.json", row.UID)),
+ body: prettyJSON(row),
+ },
+ },
+ when: row.CreatedAt,
+ comment: "short URL",
+ userID: row.CreatedBy,
+ })
+ if err != nil {
+ return err
+ }
+ }
+ return err
+ })
+ if err != nil || len(lastSeen) < 1 {
+ return err
+ }
+
+ return helper.add(commitOptions{
+ body: []commitBody{
+ {
+ fpath: filepath.Join(dir, "last_seen_at.json"),
+ body: prettyJSON(lastSeen),
+ },
+ },
+ when: time.UnixMilli(mostRecent),
+ comment: "short URL",
+ })
+}
diff --git a/pkg/services/export/export_usage.go b/pkg/services/export/export_usage.go
new file mode 100644
index 00000000000..edae9a83d31
--- /dev/null
+++ b/pkg/services/export/export_usage.go
@@ -0,0 +1,85 @@
+package export
+
+import (
+ "path"
+ "strconv"
+ "strings"
+
+ "github.com/grafana/grafana-plugin-sdk-go/data/sqlutil"
+ "github.com/grafana/grafana/pkg/services/sqlstore"
+)
+
+func exportUsage(helper *commitHelper, job *gitExportJob) error {
+ return job.sql.WithDbSession(helper.ctx, func(sess *sqlstore.DBSession) error {
+ commit := commitOptions{
+ comment: "usage stats",
+ }
+
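+ // Each entry pairs a table with the org-scoped query used to dump it as a data frame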
+ type statsTables struct {
+ table string
+ sql string
+ converters []sqlutil.Converter
+ }
+
+ dump := []statsTables{
+ {
+ table: "data_source_usage_by_day",
+ sql: `SELECT day,uid,queries,errors,load_duration_ms
+ FROM data_source_usage_by_day
+ JOIN data_source ON data_source.id = data_source_usage_by_day.data_source_id
+ WHERE org_id =` + strconv.FormatInt(helper.orgID, 10),
+ converters: []sqlutil.Converter{{Dynamic: true}},
+ },
+ {
+ table: "dashboard_usage_by_day",
+ sql: `SELECT uid,day,views,queries,errors,load_duration
+ FROM dashboard_usage_by_day
+ JOIN dashboard ON dashboard_usage_by_day.dashboard_id = dashboard.id
+ WHERE org_id =` + strconv.FormatInt(helper.orgID, 10),
+ converters: []sqlutil.Converter{{Dynamic: true}},
+ },
+ {
+ table: "dashboard_usage_sums",
+ sql: `SELECT uid,
+ views_last_1_days,
+ views_last_7_days,
+ views_last_30_days,
+ views_total,
+ queries_last_1_days,
+ queries_last_7_days,
+ queries_last_30_days,
+ queries_total,
+ errors_last_1_days,
+ errors_last_7_days,
+ errors_last_30_days,
+ errors_total
+ FROM dashboard_usage_sums
+ JOIN dashboard ON dashboard_usage_sums.dashboard_id = dashboard.id
+ WHERE org_id =` + strconv.FormatInt(helper.orgID, 10),
+ converters: []sqlutil.Converter{{Dynamic: true}},
+ },
+ }
+
+ for _, usage := range dump {
+ rows, err := sess.DB().QueryContext(helper.ctx, usage.sql)
+ if err != nil {
+ if strings.HasPrefix(err.Error(), "no such table") {
+ continue
+ }
+ return err
+ }
+
+ frame, err := sqlutil.FrameFromRows(rows.Rows, -1, usage.converters...)
+ if err != nil {
+ return err
+ }
+ frame.Name = usage.table
+ commit.body = append(commit.body, commitBody{
+ fpath: path.Join(helper.orgDir, "usage", usage.table+".json"),
+ frame: frame,
+ })
+ }
+
+ return helper.add(commit)
+ })
+}
diff --git a/pkg/services/export/frame_helper.go b/pkg/services/export/frame_helper.go
deleted file mode 100644
index 1f7a291d8e5..00000000000
--- a/pkg/services/export/frame_helper.go
+++ /dev/null
@@ -1,138 +0,0 @@
-package export
-
-import (
- "encoding/json"
- "fmt"
- "strings"
-
- "github.com/grafana/grafana-plugin-sdk-go/data"
-)
-
-type fieldInfo struct {
- Name string
- Conv data.FieldConverter
-}
-
-type frameOpts struct {
- schema []fieldInfo
- skip []string
-}
-
-func prettyJSON(v interface{}) []byte {
- b, _ := json.MarshalIndent(v, "", " ")
- return b
-}
-
-func queryResultToDataFrame(rows []map[string]interface{}, opts frameOpts) (*data.Frame, error) {
- count := len(rows)
- if count < 1 {
- return nil, nil // empty frame
- }
-
- schema := opts.schema
- if len(schema) < 1 {
- skip := make(map[string]bool, len(opts.skip))
- for _, k := range opts.skip {
- skip[k] = true
- }
-
- for k, v := range rows[0] {
- if skip[k] {
- continue
- }
- field := fieldInfo{
- Name: k,
- Conv: data.FieldConverter{
- OutputFieldType: data.FieldTypeFor(v),
- },
- }
- if field.Conv.OutputFieldType == data.FieldTypeUnknown {
- fmt.Printf("UNKNOWN type: %s / %v\n", k, v)
- continue
- }
-
- // Don't write passwords to disk for now!!!!
- if k == "password" || k == "salt" {
- field.Conv.Converter = func(v interface{}) (interface{}, error) {
- return fmt.Sprintf("<%s>", k), nil
- }
- }
-
- schema = append(schema, field)
- }
- }
-
- fields := make([]*data.Field, len(schema))
- for i, s := range schema {
- fields[i] = data.NewFieldFromFieldType(s.Conv.OutputFieldType, count)
- fields[i].Name = s.Name
- }
-
- var err error
- for i, row := range rows {
- for j, s := range schema {
- v, ok := row[s.Name]
- if ok && v != nil {
- if s.Conv.Converter != nil {
- v, err = s.Conv.Converter(v)
- if err != nil {
- return nil, fmt.Errorf("converting field: %s // %s", s.Name, err.Error())
- }
- }
- fields[j].Set(i, v)
- }
- }
- }
-
- // Fields are in random order
- if len(opts.schema) < 1 {
- last := []*data.Field{}
- frame := data.NewFrame("")
- lookup := make(map[string]*data.Field, len(fields))
- for _, f := range fields {
- if f.Name == "id" {
- frame.Fields = append(frame.Fields, f) // first
- continue
- }
- lookup[f.Name] = f
- }
-
- // First items
- for _, name := range []string{"name", "login", "email", "role", "description", "uid"} {
- f, ok := lookup[name]
- if ok {
- frame.Fields = append(frame.Fields, f) // first
- delete(lookup, name)
- }
- }
-
- // IDs
- for k, f := range lookup {
- if strings.HasSuffix(k, "_id") {
- frame.Fields = append(frame.Fields, f) // first
- delete(lookup, k)
- } else if strings.HasPrefix(k, "is_") {
- last = append(last, f) // first
- delete(lookup, k)
- }
- }
-
- // Last items
- for _, name := range []string{"created", "updated"} {
- f, ok := lookup[name]
- if ok {
- last = append(last, f) // first
- delete(lookup, name)
- }
- }
-
- // Rest
- for _, f := range lookup {
- frame.Fields = append(frame.Fields, f)
- }
-
- frame.Fields = append(frame.Fields, last...)
- return frame, nil
- }
- return data.NewFrame("", fields...), nil
-}
diff --git a/pkg/services/export/git_export_job.go b/pkg/services/export/git_export_job.go
index 55b6a5dc723..4b13b5e44c8 100644
--- a/pkg/services/export/git_export_job.go
+++ b/pkg/services/export/git_export_job.go
@@ -2,6 +2,7 @@ package export
import (
"context"
+ "encoding/json"
"fmt"
"path"
"sync"
@@ -22,7 +23,6 @@ type gitExportJob struct {
logger log.Logger
sql *sqlstore.SQLStore
dashboardsnapshotsService dashboardsnapshots.Service
- orgID int64
rootDir string
statusMu sync.Mutex
@@ -32,15 +32,12 @@ type gitExportJob struct {
helper *commitHelper
}
-type simpleExporter = func(helper *commitHelper, job *gitExportJob) error
-
func startGitExportJob(cfg ExportConfig, sql *sqlstore.SQLStore, dashboardsnapshotsService dashboardsnapshots.Service, rootDir string, orgID int64, broadcaster statusBroadcaster) (Job, error) {
job := &gitExportJob{
logger: log.New("git_export_job"),
cfg: cfg,
sql: sql,
dashboardsnapshotsService: dashboardsnapshotsService,
- orgID: orgID,
rootDir: rootDir,
broadcaster: broadcaster,
status: ExportStatus{
@@ -153,7 +150,7 @@ func (e *gitExportJob) doExportWithHistory() error {
return err
}
- err = e.doOrgExportWithHistory(e.helper)
+ err := e.process(exporters)
if err != nil {
return err
}
@@ -170,48 +167,40 @@ func (e *gitExportJob) doExportWithHistory() error {
return err
}
-func (e *gitExportJob) doOrgExportWithHistory(helper *commitHelper) error {
- include := e.cfg.Include
-
- exporters := []simpleExporter{}
- if include.Dash {
- exporters = append(exporters, exportDashboards)
- }
-
- if include.DS {
- exporters = append(exporters, exportDataSources)
- }
-
- if include.Auth {
- exporters = append(exporters, dumpAuthTables)
- }
-
- if include.Services {
- exporters = append(exporters, exportFiles,
- exportSystemPreferences,
- exportSystemStars,
- exportSystemPlaylists,
- exportKVStore,
- exportLive)
+func (e *gitExportJob) process(exporters []Exporter) error {
+ if false { // NEEDS a real user ID first
+ err := exportSnapshots(e.helper, e)
+ if err != nil {
+ return err
+ }
}
- if include.Anno {
- exporters = append(exporters, exportAnnotations)
- }
+ for _, exp := range exporters {
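+ // Skip any exporter whose key is excluded by the job config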
+ if e.cfg.Exclude[exp.Key] {
+ continue
+ }
- if include.Snapshots {
- exporters = append(exporters, exportSnapshots)
- }
+ if exp.process != nil {
+ e.status.Target = exp.Key
+ e.helper.exporter = exp.Key
+ err := exp.process(e.helper, e)
+ if err != nil {
+ return err
+ }
+ }
- for _, fn := range exporters {
- err := fn(helper, e)
- if err != nil {
- return err
+ if exp.Exporters != nil {
+ // Recurse into nested exporters, then continue with the remaining siblings
+ if err := e.process(exp.Exporters); err != nil {
+ return err
+ }
+ }
}
return nil
}
+func prettyJSON(v interface{}) []byte {
+ b, _ := json.MarshalIndent(v, "", " ")
+ return b
+}
+
/**
git remote add origin git@github.com:ryantxu/test-dash-repo.git
diff --git a/pkg/services/export/service.go b/pkg/services/export/service.go
index d39f5fc4fff..e8f5e185c7e 100644
--- a/pkg/services/export/service.go
+++ b/pkg/services/export/service.go
@@ -23,6 +23,9 @@ type ExportService interface {
// List folder contents
HandleGetStatus(c *models.ReqContext) response.Response
+ // List the available export options
+ HandleGetOptions(c *models.ReqContext) response.Response
+
// Read raw file contents out of the store
HandleRequestExport(c *models.ReqContext) response.Response
@@ -30,6 +33,108 @@ type ExportService interface {
HandleRequestStop(c *models.ReqContext) response.Response
}
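+// All known exporters, in execution order; a job can skip any of them via the ExportConfig.Exclude map.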
+var exporters = []Exporter{
+ {
+ Key: "auth",
+ Name: "Authentication",
+ Description: "Saves raw SQL tables",
+ process: dumpAuthTables,
+ },
+ {
+ Key: "dash",
+ Name: "Dashboards",
+ Description: "Save dashboard JSON",
+ process: exportDashboards,
+ Exporters: []Exporter{
+ {
+ Key: "dash_thumbs",
+ Name: "Dashboard thumbnails",
+ Description: "Save current dashboard preview images",
+ process: exportDashboardThumbnails,
+ },
+ },
+ },
+ {
+ Key: "alerts",
+ Name: "Alerts",
+ Description: "Archive alert rules and configuration",
+ process: exportAlerts,
+ },
+ {
+ Key: "ds",
+ Name: "Data sources",
+ Description: "Data source configurations",
+ process: exportDataSources,
+ },
+ {
+ Key: "services",
+ Name: "Services",
+ Description: "Save service settings",
+ Exporters: []Exporter{
+ {
+ Name: "Preferences",
+ Description: "User and team preferences",
+ process: exportSystemPreferences,
+ },
+ {
+ Name: "Stars",
+ Description: "User stars",
+ process: exportSystemStars,
+ },
+ {
+ Name: "Playlists",
+ Description: "Playlists",
+ process: exportSystemPlaylists,
+ },
+ {
+ Name: "Key Value store",
+ Description: "Internal KV store",
+ process: exportKVStore,
+ },
+ {
+ Name: "Short URLs",
+ Description: "saved links",
+ process: exportSystemShortURL,
+ },
+ {
+ Name: "Grafana live",
+ Description: "archived messages",
+ process: exportLive,
+ },
+ },
+ },
+ {
+ Key: "files",
+ Name: "Files",
+ Description: "Export internal file system",
+ process: exportFiles,
+ },
+ {
+ Key: "anno",
+ Name: "Annotations",
+ Description: "Write an DataFrame for all annotations on a dashboard",
+ process: exportAnnotations,
+ },
+ {
+ Key: "plugins",
+ Name: "Plugins",
+ Description: "Save settings for all configured plugins",
+ process: exportPlugins,
+ },
+ {
+ Key: "usage",
+ Name: "Usage",
+ Description: "archive current usage stats",
+ process: exportUsage,
+ },
+ // {
+ // Key: "snapshots",
+ // Name: "Snapshots",
+ // Description: "write snapshots",
+ // process: exportSnapshots,
+ // },
+}
+
type StandardExport struct {
logger log.Logger
glive *live.GrafanaLive
@@ -59,6 +164,13 @@ func ProvideService(sql *sqlstore.SQLStore, features featuremgmt.FeatureToggles,
}
}
+func (ex *StandardExport) HandleGetOptions(c *models.ReqContext) response.Response {
+ info := map[string]interface{}{
+ "exporters": exporters,
+ }
+ return response.JSON(http.StatusOK, info)
+}
+
func (ex *StandardExport) HandleGetStatus(c *models.ReqContext) response.Response {
ex.mutex.Lock()
defer ex.mutex.Unlock()
@@ -114,7 +226,12 @@ func (ex *StandardExport) HandleRequestExport(c *models.ReqContext) response.Res
}
ex.exportJob = job
- return response.JSON(http.StatusOK, ex.exportJob.getStatus())
+
+ info := map[string]interface{}{
+ "cfg": cfg, // parsed job we are running
+ "status": ex.exportJob.getStatus(),
+ }
+ return response.JSON(http.StatusOK, info)
}
func (ex *StandardExport) broadcastStatus(orgID int64, s ExportStatus) {
diff --git a/pkg/services/export/stub.go b/pkg/services/export/stub.go
index 47e9267bb62..a0ad9a106f8 100644
--- a/pkg/services/export/stub.go
+++ b/pkg/services/export/stub.go
@@ -15,6 +15,10 @@ func (ex *StubExport) HandleGetStatus(c *models.ReqContext) response.Response {
return response.Error(http.StatusForbidden, "feature not enabled", nil)
}
+func (ex *StubExport) HandleGetOptions(c *models.ReqContext) response.Response {
+ return response.Error(http.StatusForbidden, "feature not enabled", nil)
+}
+
func (ex *StubExport) HandleRequestExport(c *models.ReqContext) response.Response {
return response.Error(http.StatusForbidden, "feature not enabled", nil)
}
diff --git a/pkg/services/export/types.go b/pkg/services/export/types.go
index ec80a12a469..5deade2fd69 100644
--- a/pkg/services/export/types.go
+++ b/pkg/services/export/types.go
@@ -19,15 +19,7 @@ type ExportConfig struct {
GeneralFolderPath string `json:"generalFolderPath"`
KeepHistory bool `json:"history"`
- Include struct {
- Auth bool `json:"auth"`
- DS bool `json:"ds"`
- Dash bool `json:"dash"`
- Services bool `json:"services"`
- Usage bool `json:"usage"`
- Anno bool `json:"anno"`
- Snapshots bool `json:"snapshots"`
- } `json:"include"`
+ Exclude map[string]bool `json:"exclude"`
// Depends on the format
Git GitExportConfig `json:"git"`
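Under the new schema a job config looks roughly like this (values are illustrative; keys follow the exporter list registered in service.go, and any key absent from the map stays included):

cfg := ExportConfig{
	GeneralFolderPath: "general",
	KeepHistory:       true,
	// opt out by exporter key instead of toggling include flags
	Exclude: map[string]bool{
		"auth":  true,
		"usage": true,
	},
}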
@@ -43,3 +35,12 @@ type Job interface {
// Will broadcast the live status
type statusBroadcaster func(s ExportStatus)
+
+type Exporter struct {
+ Key string `json:"key"`
+ Name string `json:"name"`
+ Description string `json:"description"`
+ Exporters []Exporter `json:"exporters,omitempty"`
+
+ process func(helper *commitHelper, job *gitExportJob) error
+}
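Because `process` is unexported, encoding/json never serializes it, so HandleGetOptions exposes only the descriptive fields. A hypothetical illustration:

// Only the JSON-tagged fields survive marshaling; the process func is invisible.
b, _ := json.MarshalIndent(Exporter{
	Key:         "dash",
	Name:        "Dashboards",
	Description: "Save dashboard JSON",
	Exporters:   []Exporter{{Key: "dash_thumbs", Name: "Dashboard thumbnails"}},
}, "", "  ")
// b holds the nested key/name/description tree and nothing else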
diff --git a/pkg/services/featuremgmt/registry.go b/pkg/services/featuremgmt/registry.go
index 81c39c2bace..f224ab0c434 100644
--- a/pkg/services/featuremgmt/registry.go
+++ b/pkg/services/featuremgmt/registry.go
@@ -244,6 +244,12 @@ var (
State: FeatureStateAlpha,
FrontendOnly: true,
},
+ {
+ Name: "disableSecretsCompatibility",
+ Description: "Disable duplicated secret storage in legacy tables",
+ State: FeatureStateAlpha,
+ RequiresRestart: true,
+ },
{
Name: "logRequestsInstrumentedAsUnknown",
Description: "Logs the path for requests that are instrumented as unknown",
diff --git a/pkg/services/featuremgmt/toggles_gen.go b/pkg/services/featuremgmt/toggles_gen.go
index d95e7241994..7437de560fa 100644
--- a/pkg/services/featuremgmt/toggles_gen.go
+++ b/pkg/services/featuremgmt/toggles_gen.go
@@ -179,6 +179,10 @@ const (
// Use grafana-experimental UI in Cloud Monitoring
FlagCloudMonitoringExperimentalUI = "cloudMonitoringExperimentalUI"
+ // FlagDisableSecretsCompatibility
+ // Disable duplicated secret storage in legacy tables
+ FlagDisableSecretsCompatibility = "disableSecretsCompatibility"
+
// FlagLogRequestsInstrumentedAsUnknown
// Logs the path for requests that are instrumented as unknown
FlagLogRequestsInstrumentedAsUnknown = "logRequestsInstrumentedAsUnknown"
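A sketch of how the new flag would be consumed (assuming the usual FeatureToggles.IsEnabled accessor; the call site here is hypothetical):

// Gate the legacy write path on the new toggle.
if features.IsEnabled(featuremgmt.FlagDisableSecretsCompatibility) {
	// skip duplicating secrets into the legacy tables
}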
diff --git a/pkg/services/ldap/ldap.go b/pkg/services/ldap/ldap.go
index 977f7f3db35..39390e84046 100644
--- a/pkg/services/ldap/ldap.go
+++ b/pkg/services/ldap/ldap.go
@@ -494,7 +494,7 @@ func (server *Server) AdminBind() error {
err := server.userBind(server.Config.BindDN, server.Config.BindPassword)
if err != nil {
server.log.Error(
- "Cannot authenticate admin user in LDAP",
+ "Cannot authenticate admin user in LDAP. Verify bind configuration",
"error",
err,
)
diff --git a/pkg/services/ngalert/api/api.go b/pkg/services/ngalert/api/api.go
index d86dc462160..69d16d27669 100644
--- a/pkg/services/ngalert/api/api.go
+++ b/pkg/services/ngalert/api/api.go
@@ -18,6 +18,7 @@ import (
"github.com/grafana/grafana/pkg/services/ngalert/notifier"
"github.com/grafana/grafana/pkg/services/ngalert/provisioning"
"github.com/grafana/grafana/pkg/services/ngalert/schedule"
+ "github.com/grafana/grafana/pkg/services/ngalert/sender"
"github.com/grafana/grafana/pkg/services/ngalert/state"
"github.com/grafana/grafana/pkg/services/ngalert/store"
"github.com/grafana/grafana/pkg/services/quota"
@@ -28,7 +29,7 @@ import (
// timeNow makes it possible to test usage of time
var timeNow = time.Now
-type Scheduler interface {
+type ExternalAlertmanagerProvider interface {
AlertmanagersFor(orgID int64) []*url.URL
DroppedAlertmanagersFor(orgID int64) []*url.URL
}
@@ -81,6 +82,7 @@ type API struct {
Templates *provisioning.TemplateService
MuteTimings *provisioning.MuteTimingService
AlertRules *provisioning.AlertRuleService
+ AlertsRouter *sender.AlertsRouter
}
// RegisterAPIEndpoints registers API handlers
@@ -128,9 +130,9 @@ func (api *API) RegisterAPIEndpoints(m *metrics.API) {
}), m)
api.RegisterConfigurationApiEndpoints(NewForkedConfiguration(
&AdminSrv{
- store: api.AdminConfigStore,
- log: logger,
- scheduler: api.Schedule,
+ store: api.AdminConfigStore,
+ log: logger,
+ alertmanagerProvider: api.AlertsRouter,
},
), m)
diff --git a/pkg/services/ngalert/api/api_admin.go b/pkg/services/ngalert/api/api_admin.go
index a19214eeec9..e3f33f221dd 100644
--- a/pkg/services/ngalert/api/api_admin.go
+++ b/pkg/services/ngalert/api/api_admin.go
@@ -16,14 +16,14 @@ import (
)
type AdminSrv struct {
- scheduler Scheduler
- store store.AdminConfigurationStore
- log log.Logger
+ alertmanagerProvider ExternalAlertmanagerProvider
+ store store.AdminConfigurationStore
+ log log.Logger
}
func (srv AdminSrv) RouteGetAlertmanagers(c *models.ReqContext) response.Response {
- urls := srv.scheduler.AlertmanagersFor(c.OrgId)
- droppedURLs := srv.scheduler.DroppedAlertmanagersFor(c.OrgId)
+ urls := srv.alertmanagerProvider.AlertmanagersFor(c.OrgId)
+ droppedURLs := srv.alertmanagerProvider.DroppedAlertmanagersFor(c.OrgId)
ams := v1.AlertManagersResult{Active: make([]v1.AlertManager, len(urls)), Dropped: make([]v1.AlertManager, len(droppedURLs))}
for i, url := range urls {
ams.Active[i].URL = url.String()
diff --git a/pkg/services/ngalert/api/api_provisioning.go b/pkg/services/ngalert/api/api_provisioning.go
index 5cc6c97f0d4..a01ee84e333 100644
--- a/pkg/services/ngalert/api/api_provisioning.go
+++ b/pkg/services/ngalert/api/api_provisioning.go
@@ -25,7 +25,7 @@ type ProvisioningSrv struct {
}
type ContactPointService interface {
- GetContactPoints(ctx context.Context, orgID int64) ([]definitions.EmbeddedContactPoint, error)
+ GetContactPoints(ctx context.Context, q provisioning.ContactPointQuery) ([]definitions.EmbeddedContactPoint, error)
CreateContactPoint(ctx context.Context, orgID int64, contactPoint definitions.EmbeddedContactPoint, p alerting_models.Provenance) (definitions.EmbeddedContactPoint, error)
UpdateContactPoint(ctx context.Context, orgID int64, contactPoint definitions.EmbeddedContactPoint, p alerting_models.Provenance) error
DeleteContactPoint(ctx context.Context, orgID int64, uid string) error
@@ -52,7 +52,7 @@ type MuteTimingService interface {
type AlertRuleService interface {
GetAlertRule(ctx context.Context, orgID int64, ruleUID string) (alerting_models.AlertRule, alerting_models.Provenance, error)
- CreateAlertRule(ctx context.Context, rule alerting_models.AlertRule, provenance alerting_models.Provenance) (alerting_models.AlertRule, error)
+ CreateAlertRule(ctx context.Context, rule alerting_models.AlertRule, provenance alerting_models.Provenance, userID int64) (alerting_models.AlertRule, error)
UpdateAlertRule(ctx context.Context, rule alerting_models.AlertRule, provenance alerting_models.Provenance) (alerting_models.AlertRule, error)
DeleteAlertRule(ctx context.Context, orgID int64, ruleUID string, provenance alerting_models.Provenance) error
GetRuleGroup(ctx context.Context, orgID int64, folder, group string) (definitions.AlertRuleGroup, error)
@@ -95,7 +95,11 @@ func (srv *ProvisioningSrv) RouteResetPolicyTree(c *models.ReqContext) response.
}
func (srv *ProvisioningSrv) RouteGetContactPoints(c *models.ReqContext) response.Response {
- cps, err := srv.contactPointService.GetContactPoints(c.Req.Context(), c.OrgId)
+ q := provisioning.ContactPointQuery{
+ Name: c.Query("name"),
+ OrgID: c.OrgId,
+ }
+ cps, err := srv.contactPointService.GetContactPoints(c.Req.Context(), q)
if err != nil {
return ErrResp(http.StatusInternalServerError, err, "")
}
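For reference, a filtered lookup then reads roughly like this (the contact point name is a made-up example passed as ?name=slack-ops; matching semantics are whatever the provisioning service implements):

q := provisioning.ContactPointQuery{
	OrgID: c.OrgId,
	Name:  "slack-ops", // hypothetical name filter
}
cps, err := srv.contactPointService.GetContactPoints(c.Req.Context(), q)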
@@ -249,8 +253,8 @@ func (srv *ProvisioningSrv) RouteRouteGetAlertRule(c *models.ReqContext, UID str
return response.JSON(http.StatusOK, definitions.NewAlertRule(rule, provenace))
}
-func (srv *ProvisioningSrv) RoutePostAlertRule(c *models.ReqContext, ar definitions.AlertRule) response.Response {
- createdAlertRule, err := srv.alertRules.CreateAlertRule(c.Req.Context(), ar.UpstreamModel(), alerting_models.ProvenanceAPI)
+func (srv *ProvisioningSrv) RoutePostAlertRule(c *models.ReqContext, ar definitions.ProvisionedAlertRule) response.Response {
+ createdAlertRule, err := srv.alertRules.CreateAlertRule(c.Req.Context(), ar.UpstreamModel(), alerting_models.ProvenanceAPI, c.UserId)
if errors.Is(err, alerting_models.ErrAlertRuleFailedValidation) {
return ErrResp(http.StatusBadRequest, err, "")
}
@@ -258,6 +262,9 @@ func (srv *ProvisioningSrv) RoutePostAlertRule(c *models.ReqContext, ar definiti
if errors.Is(err, store.ErrOptimisticLock) {
return ErrResp(http.StatusConflict, err, "")
}
+ if errors.Is(err, alerting_models.ErrQuotaReached) {
+ return ErrResp(http.StatusForbidden, err, "")
+ }
return ErrResp(http.StatusInternalServerError, err, "")
}
ar.ID = createdAlertRule.ID
@@ -266,7 +273,7 @@ func (srv *ProvisioningSrv) RoutePostAlertRule(c *models.ReqContext, ar definiti
return response.JSON(http.StatusCreated, ar)
}
-func (srv *ProvisioningSrv) RoutePutAlertRule(c *models.ReqContext, ar definitions.AlertRule, UID string) response.Response {
+func (srv *ProvisioningSrv) RoutePutAlertRule(c *models.ReqContext, ar definitions.ProvisionedAlertRule, UID string) response.Response {
updated := ar.UpstreamModel()
updated.UID = UID
updatedAlertRule, err := srv.alertRules.UpdateAlertRule(c.Req.Context(), ar.UpstreamModel(), alerting_models.ProvenanceAPI)
diff --git a/pkg/services/ngalert/api/api_provisioning_test.go b/pkg/services/ngalert/api/api_provisioning_test.go
index e176402fcea..9cdf7b67e6e 100644
--- a/pkg/services/ngalert/api/api_provisioning_test.go
+++ b/pkg/services/ngalert/api/api_provisioning_test.go
@@ -15,7 +15,8 @@ import (
"github.com/grafana/grafana/pkg/services/ngalert/models"
"github.com/grafana/grafana/pkg/services/ngalert/provisioning"
"github.com/grafana/grafana/pkg/services/ngalert/store"
- secrets "github.com/grafana/grafana/pkg/services/secrets/fakes"
+ "github.com/grafana/grafana/pkg/services/secrets"
+ secrets_fakes "github.com/grafana/grafana/pkg/services/secrets/fakes"
"github.com/grafana/grafana/pkg/services/sqlstore"
"github.com/grafana/grafana/pkg/web"
prometheus "github.com/prometheus/alertmanager/config"
@@ -259,6 +260,20 @@ func TestProvisioningApi(t *testing.T) {
require.Equal(t, 404, response.Status())
})
+
+ t.Run("have reached the rule quota, POST returns 403", func(t *testing.T) {
+ env := createTestEnv(t)
+ quotas := provisioning.MockQuotaChecker{}
+ quotas.EXPECT().LimitExceeded()
+ env.quotas = &quotas
+ sut := createProvisioningSrvSutFromEnv(t, &env)
+ rule := createTestAlertRule("rule", 1)
+ rc := createTestRequestCtx()
+
+ response := sut.RoutePostAlertRule(&rc, rule)
+
+ require.Equal(t, 403, response.Status())
+ })
})
t.Run("alert rule groups", func(t *testing.T) {
@@ -284,9 +299,21 @@ func TestProvisioningApi(t *testing.T) {
})
}
-func createProvisioningSrvSut(t *testing.T) ProvisioningSrv {
+// testEnvironment binds together common dependencies for testing alerting APIs.
+type testEnvironment struct {
+ secrets secrets.Service
+ log log.Logger
+ store store.DBstore
+ configs provisioning.AMConfigStore
+ xact provisioning.TransactionManager
+ quotas provisioning.QuotaChecker
+ prov provisioning.ProvisioningStore
+}
+
+func createTestEnv(t *testing.T) testEnvironment {
t.Helper()
- secrets := secrets.NewFakeSecretsService()
+
+ secrets := secrets_fakes.NewFakeSecretsService()
log := log.NewNopLogger()
configs := &provisioning.MockAMConfigStore{}
configs.EXPECT().
@@ -298,18 +325,41 @@ func createProvisioningSrvSut(t *testing.T) ProvisioningSrv {
SQLStore: sqlStore,
BaseInterval: time.Second * 10,
}
+ quotas := &provisioning.MockQuotaChecker{}
+ quotas.EXPECT().LimitOK()
xact := &provisioning.NopTransactionManager{}
prov := &provisioning.MockProvisioningStore{}
prov.EXPECT().SaveSucceeds()
prov.EXPECT().GetReturns(models.ProvenanceNone)
+ return testEnvironment{
+ secrets: secrets,
+ log: log,
+ configs: configs,
+ store: store,
+ xact: xact,
+ prov: prov,
+ quotas: quotas,
+ }
+}
+
+func createProvisioningSrvSut(t *testing.T) ProvisioningSrv {
+ t.Helper()
+
+ env := createTestEnv(t)
+ return createProvisioningSrvSutFromEnv(t, &env)
+}
+
+func createProvisioningSrvSutFromEnv(t *testing.T, env *testEnvironment) ProvisioningSrv {
+ t.Helper()
+
return ProvisioningSrv{
- log: log,
+ log: env.log,
policies: newFakeNotificationPolicyService(),
- contactPointService: provisioning.NewContactPointService(configs, secrets, prov, xact, log),
- templates: provisioning.NewTemplateService(configs, prov, xact, log),
- muteTimings: provisioning.NewMuteTimingService(configs, prov, xact, log),
- alertRules: provisioning.NewAlertRuleService(store, prov, xact, 60, 10, log),
+ contactPointService: provisioning.NewContactPointService(env.configs, env.secrets, env.prov, env.xact, env.log),
+ templates: provisioning.NewTemplateService(env.configs, env.prov, env.xact, env.log),
+ muteTimings: provisioning.NewMuteTimingService(env.configs, env.prov, env.xact, env.log),
+ alertRules: provisioning.NewAlertRuleService(env.store, env.prov, env.quotas, env.xact, 60, 10, env.log),
}
}
@@ -418,12 +468,12 @@ func createInvalidMuteTiming() definitions.MuteTimeInterval {
}
}
-func createInvalidAlertRule() definitions.AlertRule {
- return definitions.AlertRule{}
+func createInvalidAlertRule() definitions.ProvisionedAlertRule {
+ return definitions.ProvisionedAlertRule{}
}
-func createTestAlertRule(title string, orgID int64) definitions.AlertRule {
- return definitions.AlertRule{
+func createTestAlertRule(title string, orgID int64) definitions.ProvisionedAlertRule {
+ return definitions.ProvisionedAlertRule{
OrgID: orgID,
Title: title,
Condition: "A",
@@ -445,7 +495,7 @@ func createTestAlertRule(title string, orgID int64) definitions.AlertRule {
}
}
-func insertRule(t *testing.T, srv ProvisioningSrv, rule definitions.AlertRule) {
+func insertRule(t *testing.T, srv ProvisioningSrv, rule definitions.ProvisionedAlertRule) {
t.Helper()
rc := createTestRequestCtx()
diff --git a/pkg/services/ngalert/api/api_ruler.go b/pkg/services/ngalert/api/api_ruler.go
index acf6ebf16a7..b970db4acaa 100644
--- a/pkg/services/ngalert/api/api_ruler.go
+++ b/pkg/services/ngalert/api/api_ruler.go
@@ -42,7 +42,6 @@ type RulerSrv struct {
}
var (
- errQuotaReached = errors.New("quota has been exceeded")
errProvisionedResource = errors.New("request affects resources created via provisioning API")
)
@@ -401,7 +400,7 @@ func (srv RulerSrv) updateAlertRulesInGroup(c *models.ReqContext, groupKey ngmod
return fmt.Errorf("failed to get alert rules quota: %w", err)
}
if limitReached {
- return errQuotaReached
+ return ngmodels.ErrQuotaReached
}
}
return nil
@@ -412,7 +411,7 @@ func (srv RulerSrv) updateAlertRulesInGroup(c *models.ReqContext, groupKey ngmod
return ErrResp(http.StatusNotFound, err, "failed to update rule group")
} else if errors.Is(err, ngmodels.ErrAlertRuleFailedValidation) || errors.Is(err, errProvisionedResource) {
return ErrResp(http.StatusBadRequest, err, "failed to update rule group")
- } else if errors.Is(err, errQuotaReached) {
+ } else if errors.Is(err, ngmodels.ErrQuotaReached) {
return ErrResp(http.StatusForbidden, err, "")
} else if errors.Is(err, ErrAuthorization) {
return ErrResp(http.StatusUnauthorized, err, "")
diff --git a/pkg/services/ngalert/api/api_testing.go b/pkg/services/ngalert/api/api_testing.go
index 32fd5e4e1a9..d10af199b4d 100644
--- a/pkg/services/ngalert/api/api_testing.go
+++ b/pkg/services/ngalert/api/api_testing.go
@@ -55,10 +55,7 @@ func (srv TestingApiSrv) RouteTestGrafanaRuleConfig(c *models.ReqContext, body a
now = timeNow()
}
- evalResults, err := srv.evaluator.ConditionEval(&evalCond, now)
- if err != nil {
- return ErrResp(http.StatusBadRequest, err, "Failed to evaluate conditions")
- }
+ evalResults := srv.evaluator.ConditionEval(c.Req.Context(), evalCond, now)
frame := evalResults.AsDataFrame()
return response.JSONStreaming(http.StatusOK, util.DynMap{
@@ -121,7 +118,7 @@ func (srv TestingApiSrv) RouteEvalQueries(c *models.ReqContext, cmd apimodels.Ev
return ErrResp(http.StatusBadRequest, err, "invalid queries or expressions")
}
- evalResults, err := srv.evaluator.QueriesAndExpressionsEval(c.SignedInUser.OrgId, cmd.Data, now)
+ evalResults, err := srv.evaluator.QueriesAndExpressionsEval(c.Req.Context(), c.SignedInUser.OrgId, cmd.Data, now)
if err != nil {
return ErrResp(http.StatusBadRequest, err, "Failed to evaluate queries and expressions")
}
diff --git a/pkg/services/ngalert/api/api_testing_test.go b/pkg/services/ngalert/api/api_testing_test.go
index ecebe2eedab..f2f33984bcb 100644
--- a/pkg/services/ngalert/api/api_testing_test.go
+++ b/pkg/services/ngalert/api/api_testing_test.go
@@ -69,7 +69,7 @@ func TestRouteTestGrafanaRuleConfig(t *testing.T) {
evaluator := &eval.FakeEvaluator{}
var result []eval.Result
- evaluator.EXPECT().ConditionEval(mock.Anything, mock.Anything).Return(result, nil)
+ evaluator.EXPECT().ConditionEval(mock.Anything, mock.Anything).Return(result)
srv := createTestingApiSrv(ds, ac, evaluator)
@@ -109,7 +109,7 @@ func TestRouteTestGrafanaRuleConfig(t *testing.T) {
evaluator := &eval.FakeEvaluator{}
var result []eval.Result
- evaluator.EXPECT().ConditionEval(mock.Anything, mock.Anything).Return(result, nil)
+ evaluator.EXPECT().ConditionEval(mock.Anything, mock.Anything).Return(result)
srv := createTestingApiSrv(ds, ac, evaluator)
diff --git a/pkg/services/ngalert/api/forked_provisioning.go b/pkg/services/ngalert/api/forked_provisioning.go
index 2f61d026795..28457980df8 100644
--- a/pkg/services/ngalert/api/forked_provisioning.go
+++ b/pkg/services/ngalert/api/forked_provisioning.go
@@ -87,11 +87,11 @@ func (f *ForkedProvisioningApi) forkRouteGetAlertRule(ctx *models.ReqContext, UI
return f.svc.RouteRouteGetAlertRule(ctx, UID)
}
-func (f *ForkedProvisioningApi) forkRoutePostAlertRule(ctx *models.ReqContext, ar apimodels.AlertRule) response.Response {
+func (f *ForkedProvisioningApi) forkRoutePostAlertRule(ctx *models.ReqContext, ar apimodels.ProvisionedAlertRule) response.Response {
return f.svc.RoutePostAlertRule(ctx, ar)
}
-func (f *ForkedProvisioningApi) forkRoutePutAlertRule(ctx *models.ReqContext, ar apimodels.AlertRule, UID string) response.Response {
+func (f *ForkedProvisioningApi) forkRoutePutAlertRule(ctx *models.ReqContext, ar apimodels.ProvisionedAlertRule, UID string) response.Response {
return f.svc.RoutePutAlertRule(ctx, ar, UID)
}
diff --git a/pkg/services/ngalert/api/generated_base_api_provisioning.go b/pkg/services/ngalert/api/generated_base_api_provisioning.go
index a0e9e6aedf7..e3efd6a1713 100644
--- a/pkg/services/ngalert/api/generated_base_api_provisioning.go
+++ b/pkg/services/ngalert/api/generated_base_api_provisioning.go
@@ -89,7 +89,7 @@ func (f *ForkedProvisioningApi) RouteGetTemplates(ctx *models.ReqContext) respon
return f.forkRouteGetTemplates(ctx)
}
func (f *ForkedProvisioningApi) RoutePostAlertRule(ctx *models.ReqContext) response.Response {
- conf := apimodels.AlertRule{}
+ conf := apimodels.ProvisionedAlertRule{}
if err := web.Bind(ctx.Req, &conf); err != nil {
return response.Error(http.StatusBadRequest, "bad request data", err)
}
@@ -111,7 +111,7 @@ func (f *ForkedProvisioningApi) RoutePostMuteTiming(ctx *models.ReqContext) resp
}
func (f *ForkedProvisioningApi) RoutePutAlertRule(ctx *models.ReqContext) response.Response {
uIDParam := web.Params(ctx.Req)[":UID"]
- conf := apimodels.AlertRule{}
+ conf := apimodels.ProvisionedAlertRule{}
if err := web.Bind(ctx.Req, &conf); err != nil {
return response.Error(http.StatusBadRequest, "bad request data", err)
}
diff --git a/pkg/services/ngalert/api/tooling/Makefile b/pkg/services/ngalert/api/tooling/Makefile
index 00984f11892..279f0632116 100644
--- a/pkg/services/ngalert/api/tooling/Makefile
+++ b/pkg/services/ngalert/api/tooling/Makefile
@@ -30,6 +30,12 @@ post.json: spec.json
api.json: spec-stable.json
go run cmd/clean-swagger/main.go -if $(<) -of $@
+validate-stable: spec-stable.json $(SWAGGER)
+ $(SWAGGER) validate $(<)
+
+validate: spec.json $(SWAGGER)
+ $(SWAGGER) validate $(<)
+
swagger-codegen-api:
docker run --rm -v $$(pwd):/local --user $$(id -u):$$(id -g) parsertongue/swagger-codegen-cli:3.0.32 generate \
-i /local/post.json \
@@ -59,4 +65,4 @@ serve: post.json
serve-stable: api.json
docker run --rm -p 80:8080 -v $$(pwd):/tmp -e SWAGGER_FILE=/tmp/$(<) swaggerapi/swagger-editor
-all: post.json api.json swagger-codegen-api fix copy-files clean
+all: post.json validate api.json validate-stable swagger-codegen-api fix copy-files clean
diff --git a/pkg/services/ngalert/api/tooling/api.json b/pkg/services/ngalert/api/tooling/api.json
index 3078e893cf3..4b5b7c14432 100644
--- a/pkg/services/ngalert/api/tooling/api.json
+++ b/pkg/services/ngalert/api/tooling/api.json
@@ -1821,6 +1821,149 @@
"Provenance": {
"type": "string"
},
+ "ProvisionedAlertRule": {
+ "properties": {
+ "annotations": {
+ "additionalProperties": {
+ "type": "string"
+ },
+ "example": {
+ "runbook_url": "https://supercoolrunbook.com/page/13"
+ },
+ "type": "object"
+ },
+ "condition": {
+ "example": "A",
+ "type": "string"
+ },
+ "data": {
+ "example": [
+ {
+ "datasourceUid": "-100",
+ "model": {
+ "conditions": [
+ {
+ "evaluator": {
+ "params": [
+ 0,
+ 0
+ ],
+ "type": "gt"
+ },
+ "operator": {
+ "type": "and"
+ },
+ "query": {
+ "params": []
+ },
+ "reducer": {
+ "params": [],
+ "type": "avg"
+ },
+ "type": "query"
+ }
+ ],
+ "datasource": {
+ "type": "__expr__",
+ "uid": "__expr__"
+ },
+ "expression": "1 == 1",
+ "hide": false,
+ "intervalMs": 1000,
+ "maxDataPoints": 43200,
+ "refId": "A",
+ "type": "math"
+ },
+ "queryType": "",
+ "refId": "A",
+ "relativeTimeRange": {
+ "from": 0,
+ "to": 0
+ }
+ }
+ ],
+ "items": {
+ "$ref": "#/definitions/AlertQuery"
+ },
+ "type": "array"
+ },
+ "execErrState": {
+ "enum": [
+ "Alerting",
+ "Error",
+ "OK"
+ ],
+ "type": "string"
+ },
+ "folderUID": {
+ "example": "project_x",
+ "type": "string"
+ },
+ "for": {
+ "$ref": "#/definitions/Duration"
+ },
+ "id": {
+ "format": "int64",
+ "type": "integer"
+ },
+ "labels": {
+ "additionalProperties": {
+ "type": "string"
+ },
+ "example": {
+ "team": "sre-team-1"
+ },
+ "type": "object"
+ },
+ "noDataState": {
+ "enum": [
+ "Alerting",
+ "NoData",
+ "OK"
+ ],
+ "type": "string"
+ },
+ "orgID": {
+ "format": "int64",
+ "type": "integer"
+ },
+ "provenance": {
+ "$ref": "#/definitions/Provenance"
+ },
+ "ruleGroup": {
+ "example": "eval_group_1",
+ "maxLength": 190,
+ "minLength": 1,
+ "type": "string"
+ },
+ "title": {
+ "example": "Always firing",
+ "maxLength": 190,
+ "minLength": 1,
+ "type": "string"
+ },
+ "uid": {
+ "type": "string"
+ },
+ "updated": {
+ "format": "date-time",
+ "readOnly": true,
+ "type": "string"
+ }
+ },
+ "required": [
+ "orgID",
+ "folderUID",
+ "ruleGroup",
+ "title",
+ "condition",
+ "data",
+ "noDataState",
+ "execErrState",
+ "for"
+ ],
+ "type": "object"
+ },
"PushoverConfig": {
"properties": {
"expire": {
@@ -3269,15 +3412,15 @@
"in": "body",
"name": "Body",
"schema": {
- "$ref": "#/definitions/AlertRule"
+ "$ref": "#/definitions/ProvisionedAlertRule"
}
}
],
"responses": {
"201": {
- "description": "AlertRule",
+ "description": "ProvisionedAlertRule",
"schema": {
- "$ref": "#/definitions/AlertRule"
+ "$ref": "#/definitions/ProvisionedAlertRule"
}
},
"400": {
@@ -3328,9 +3471,9 @@
],
"responses": {
"200": {
- "description": "AlertRule",
+ "description": "ProvisionedAlertRule",
"schema": {
- "$ref": "#/definitions/AlertRule"
+ "$ref": "#/definitions/ProvisionedAlertRule"
}
},
"404": {
@@ -3359,15 +3502,15 @@
"in": "body",
"name": "Body",
"schema": {
- "$ref": "#/definitions/AlertRule"
+ "$ref": "#/definitions/ProvisionedAlertRule"
}
}
],
"responses": {
"200": {
- "description": "AlertRule",
+ "description": "ProvisionedAlertRule",
"schema": {
- "$ref": "#/definitions/AlertRule"
+ "$ref": "#/definitions/ProvisionedAlertRule"
}
},
"400": {
@@ -3386,6 +3529,14 @@
"/api/v1/provisioning/contact-points": {
"get": {
"operationId": "RouteGetContactpoints",
+ "parameters": [
+ {
+ "description": "Filter by name",
+ "in": "query",
+ "name": "name",
+ "type": "string"
+ }
+ ],
"responses": {
"200": {
"description": "ContactPoints",
diff --git a/pkg/services/ngalert/api/tooling/definitions/alertmanager_validation.go b/pkg/services/ngalert/api/tooling/definitions/alertmanager_validation.go
index 20338b4fc5a..86c8a57cc93 100644
--- a/pkg/services/ngalert/api/tooling/definitions/alertmanager_validation.go
+++ b/pkg/services/ngalert/api/tooling/definitions/alertmanager_validation.go
@@ -2,11 +2,12 @@ package definitions
import (
"fmt"
- "html/template"
+ tmplhtml "html/template"
"regexp"
"strings"
"time"
+ "github.com/prometheus/alertmanager/template"
"github.com/prometheus/common/model"
"gopkg.in/yaml.v3"
)
@@ -64,7 +65,9 @@ func (t *MessageTemplate) Validate() error {
return fmt.Errorf("template must have content")
}
- _, err := template.New("").Parse(t.Template)
+ tmpl := tmplhtml.New("").Option("missingkey=zero")
+ tmpl.Funcs(tmplhtml.FuncMap(template.DefaultFuncs))
+ _, err := tmpl.Parse(t.Template)
if err != nil {
return fmt.Errorf("invalid template: %w", err)
}
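A sketch of the effect, assuming toUpper is among Alertmanager's template.DefaultFuncs (it is in current releases): a template using an Alertmanager helper now parses, where plain html/template would reject it as an undefined function.

tmpl := tmplhtml.New("").Option("missingkey=zero")
tmpl.Funcs(tmplhtml.FuncMap(template.DefaultFuncs))
// parses with DefaultFuncs registered; fails with "function not defined" without them
_, err := tmpl.Parse(`{{ "firing" | toUpper }}`)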
diff --git a/pkg/services/ngalert/api/tooling/definitions/provisioning_alert_rules.go b/pkg/services/ngalert/api/tooling/definitions/provisioning_alert_rules.go
index 8ffd32b2ad4..65c1f650a76 100644
--- a/pkg/services/ngalert/api/tooling/definitions/provisioning_alert_rules.go
+++ b/pkg/services/ngalert/api/tooling/definitions/provisioning_alert_rules.go
@@ -11,7 +11,7 @@ import (
// Get a specific alert rule by UID.
//
// Responses:
-// 200: AlertRule
+// 200: ProvisionedAlertRule
// 404: description: Not found.
// swagger:route POST /api/v1/provisioning/alert-rules provisioning stable RoutePostAlertRule
@@ -22,7 +22,7 @@ import (
// - application/json
//
// Responses:
-// 201: AlertRule
+// 201: ProvisionedAlertRule
// 400: ValidationError
// swagger:route PUT /api/v1/provisioning/alert-rules/{UID} provisioning stable RoutePutAlertRule
@@ -33,7 +33,7 @@ import (
// - application/json
//
// Responses:
-// 200: AlertRule
+// 200: ProvisionedAlertRule
// 400: ValidationError
// swagger:route DELETE /api/v1/provisioning/alert-rules/{UID} provisioning stable RouteDeleteAlertRule
@@ -53,10 +53,10 @@ type AlertRuleUIDReference struct {
// swagger:parameters RoutePostAlertRule RoutePutAlertRule
type AlertRulePayload struct {
// in:body
- Body AlertRule
+ Body ProvisionedAlertRule
}
-type AlertRule struct {
+type ProvisionedAlertRule struct {
ID int64 `json:"id"`
UID string `json:"uid"`
// required: true
@@ -96,7 +96,7 @@ type AlertRule struct {
Provenance models.Provenance `json:"provenance,omitempty"`
}
-func (a *AlertRule) UpstreamModel() models.AlertRule {
+func (a *ProvisionedAlertRule) UpstreamModel() models.AlertRule {
return models.AlertRule{
ID: a.ID,
UID: a.UID,
@@ -115,8 +115,8 @@ func (a *AlertRule) UpstreamModel() models.AlertRule {
}
}
-func NewAlertRule(rule models.AlertRule, provenance models.Provenance) AlertRule {
- return AlertRule{
+func NewAlertRule(rule models.AlertRule, provenance models.Provenance) ProvisionedAlertRule {
+ return ProvisionedAlertRule{
ID: rule.ID,
UID: rule.UID,
OrgID: rule.OrgID,
@@ -172,6 +172,7 @@ type AlertRuleGroupPayload struct {
Body AlertRuleGroupMetadata
}
+// swagger:model
type AlertRuleGroupMetadata struct {
Interval int64 `json:"interval"`
}
diff --git a/pkg/services/ngalert/api/tooling/definitions/provisioning_contactpoints.go b/pkg/services/ngalert/api/tooling/definitions/provisioning_contactpoints.go
index 08245794a3f..894271203ee 100644
--- a/pkg/services/ngalert/api/tooling/definitions/provisioning_contactpoints.go
+++ b/pkg/services/ngalert/api/tooling/definitions/provisioning_contactpoints.go
@@ -53,6 +53,14 @@ type ContactPointUIDReference struct {
UID string
}
+// swagger:parameters RouteGetContactpoints
+type ContactPointParams struct {
+ // Filter by name
+ // in: query
+ // required: false
+ Name string `json:"name"`
+}
+
// swagger:parameters RoutePostContactpoints RoutePutContactpoint
type ContactPointPayload struct {
// in:body
diff --git a/pkg/services/ngalert/api/tooling/post.json b/pkg/services/ngalert/api/tooling/post.json
index d5181930c97..ae00df495ef 100644
--- a/pkg/services/ngalert/api/tooling/post.json
+++ b/pkg/services/ngalert/api/tooling/post.json
@@ -1821,6 +1821,149 @@
"Provenance": {
"type": "string"
},
+ "ProvisionedAlertRule": {
+ "properties": {
+ "annotations": {
+ "additionalProperties": {
+ "type": "string"
+ },
+ "example": {
+ "runbook_url": "https://supercoolrunbook.com/page/13"
+ },
+ "type": "object"
+ },
+ "condition": {
+ "example": "A",
+ "type": "string"
+ },
+ "data": {
+ "example": [
+ {
+ "datasourceUid": "-100",
+ "model": {
+ "conditions": [
+ {
+ "evaluator": {
+ "params": [
+ 0,
+ 0
+ ],
+ "type": "gt"
+ },
+ "operator": {
+ "type": "and"
+ },
+ "query": {
+ "params": []
+ },
+ "reducer": {
+ "params": [],
+ "type": "avg"
+ },
+ "type": "query"
+ }
+ ],
+ "datasource": {
+ "type": "__expr__",
+ "uid": "__expr__"
+ },
+ "expression": "1 == 1",
+ "hide": false,
+ "intervalMs": 1000,
+ "maxDataPoints": 43200,
+ "refId": "A",
+ "type": "math"
+ },
+ "queryType": "",
+ "refId": "A",
+ "relativeTimeRange": {
+ "from": 0,
+ "to": 0
+ }
+ }
+ ],
+ "items": {
+ "$ref": "#/definitions/AlertQuery"
+ },
+ "type": "array"
+ },
+ "execErrState": {
+ "enum": [
+ "Alerting",
+ "Error",
+ "OK"
+ ],
+ "type": "string"
+ },
+ "folderUID": {
+ "example": "project_x",
+ "type": "string"
+ },
+ "for": {
+ "$ref": "#/definitions/Duration"
+ },
+ "id": {
+ "format": "int64",
+ "type": "integer"
+ },
+ "labels": {
+ "additionalProperties": {
+ "type": "string"
+ },
+ "example": {
+ "team": "sre-team-1"
+ },
+ "type": "object"
+ },
+ "noDataState": {
+ "enum": [
+ "Alerting",
+ "NoData",
+ "OK"
+ ],
+ "type": "string"
+ },
+ "orgID": {
+ "format": "int64",
+ "type": "integer"
+ },
+ "provenance": {
+ "$ref": "#/definitions/Provenance"
+ },
+ "ruleGroup": {
+ "example": "eval_group_1",
+ "maxLength": 190,
+ "minLength": 1,
+ "type": "string"
+ },
+ "title": {
+ "example": "Always firing",
+ "maxLength": 190,
+ "minLength": 1,
+ "type": "string"
+ },
+ "uid": {
+ "type": "string"
+ },
+ "updated": {
+ "format": "date-time",
+ "readOnly": true,
+ "type": "string"
+ }
+ },
+ "required": [
+ "orgID",
+ "folderUID",
+ "ruleGroup",
+ "title",
+ "condition",
+ "data",
+ "noDataState",
+ "execErrState",
+ "for"
+ ],
+ "type": "object"
+ },
"PushoverConfig": {
"properties": {
"expire": {
@@ -2761,7 +2904,6 @@
"type": "object"
},
"alertGroup": {
- "description": "AlertGroup alert group",
"properties": {
"alerts": {
"description": "alerts",
@@ -2954,7 +3096,6 @@
"type": "array"
},
"gettableSilence": {
- "description": "GettableSilence gettable silence",
"properties": {
"comment": {
"description": "comment",
@@ -4895,7 +5036,7 @@
"in": "body",
"name": "Body",
"schema": {
- "$ref": "#/definitions/AlertRule"
+ "$ref": "#/definitions/ProvisionedAlertRule"
}
}
],
@@ -4985,7 +5126,7 @@
"in": "body",
"name": "Body",
"schema": {
- "$ref": "#/definitions/AlertRule"
+ "$ref": "#/definitions/ProvisionedAlertRule"
}
}
],
@@ -5012,6 +5153,14 @@
"/api/v1/provisioning/contact-points": {
"get": {
"operationId": "RouteGetContactpoints",
+ "parameters": [
+ {
+ "description": "Filter by name",
+ "in": "query",
+ "name": "name",
+ "type": "string"
+ }
+ ],
"responses": {
"200": {
"description": "ContactPoints",
diff --git a/pkg/services/ngalert/api/tooling/spec.json b/pkg/services/ngalert/api/tooling/spec.json
index be46b5746e5..289e18c7b3f 100644
--- a/pkg/services/ngalert/api/tooling/spec.json
+++ b/pkg/services/ngalert/api/tooling/spec.json
@@ -1659,15 +1659,15 @@
"name": "Body",
"in": "body",
"schema": {
- "$ref": "#/definitions/AlertRule"
+ "$ref": "#/definitions/ProvisionedAlertRule"
}
}
],
"responses": {
"201": {
- "description": "AlertRule",
+ "description": "ProvisionedAlertRule",
"schema": {
- "$ref": "#/definitions/AlertRule"
+ "$ref": "#/definitions/ProvisionedAlertRule"
}
},
"400": {
@@ -1698,9 +1698,9 @@
],
"responses": {
"200": {
- "description": "AlertRule",
+ "description": "ProvisionedAlertRule",
"schema": {
- "$ref": "#/definitions/AlertRule"
+ "$ref": "#/definitions/ProvisionedAlertRule"
}
},
"404": {
@@ -1730,15 +1730,15 @@
"name": "Body",
"in": "body",
"schema": {
- "$ref": "#/definitions/AlertRule"
+ "$ref": "#/definitions/ProvisionedAlertRule"
}
}
],
"responses": {
"200": {
- "description": "AlertRule",
+ "description": "ProvisionedAlertRule",
"schema": {
- "$ref": "#/definitions/AlertRule"
+ "$ref": "#/definitions/ProvisionedAlertRule"
}
},
"400": {
@@ -1780,6 +1780,14 @@
],
"summary": "Get all the contact points.",
"operationId": "RouteGetContactpoints",
+ "parameters": [
+ {
+ "type": "string",
+ "description": "Filter by name",
+ "name": "name",
+ "in": "query"
+ }
+ ],
"responses": {
"200": {
"description": "ContactPoints",
@@ -4198,6 +4206,149 @@
"Provenance": {
"type": "string"
},
+ "ProvisionedAlertRule": {
+ "type": "object",
+ "required": [
+ "orgID",
+ "folderUID",
+ "ruleGroup",
+ "title",
+ "condition",
+ "data",
+ "noDataState",
+ "execErrState",
+ "for"
+ ],
+ "properties": {
+ "annotations": {
+ "type": "object",
+ "additionalProperties": {
+ "type": "string"
+ },
+ "example": {
+ "runbook_url": "https://supercoolrunbook.com/page/13"
+ }
+ },
+ "condition": {
+ "type": "string",
+ "example": "A"
+ },
+ "data": {
+ "type": "array",
+ "items": {
+ "$ref": "#/definitions/AlertQuery"
+ },
+ "example": [
+ {
+ "datasourceUid": "-100",
+ "model": {
+ "conditions": [
+ {
+ "evaluator": {
+ "params": [
+ 0,
+ 0
+ ],
+ "type": "gt"
+ },
+ "operator": {
+ "type": "and"
+ },
+ "query": {
+ "params": []
+ },
+ "reducer": {
+ "params": [],
+ "type": "avg"
+ },
+ "type": "query"
+ }
+ ],
+ "datasource": {
+ "type": "__expr__",
+ "uid": "__expr__"
+ },
+ "expression": "1 == 1",
+ "hide": false,
+ "intervalMs": 1000,
+ "maxDataPoints": 43200,
+ "refId": "A",
+ "type": "math"
+ },
+ "queryType": "",
+ "refId": "A",
+ "relativeTimeRange": {
+ "from": 0,
+ "to": 0
+ }
+ }
+ ]
+ },
+ "execErrState": {
+ "type": "string",
+ "enum": [
+ "Alerting",
+ "Error",
+ "OK"
+ ]
+ },
+ "folderUID": {
+ "type": "string",
+ "example": "project_x"
+ },
+ "for": {
+ "$ref": "#/definitions/Duration"
+ },
+ "id": {
+ "type": "integer",
+ "format": "int64"
+ },
+ "labels": {
+ "type": "object",
+ "additionalProperties": {
+ "type": "string"
+ },
+ "example": {
+ "team": "sre-team-1"
+ }
+ },
+ "noDataState": {
+ "type": "string",
+ "enum": [
+ "Alerting",
+ "NoData",
+ "OK"
+ ]
+ },
+ "orgID": {
+ "type": "integer",
+ "format": "int64"
+ },
+ "provenance": {
+ "$ref": "#/definitions/Provenance"
+ },
+ "ruleGroup": {
+ "type": "string",
+ "maxLength": 190,
+ "minLength": 1,
+ "example": "eval_group_1"
+ },
+ "title": {
+ "type": "string",
+ "maxLength": 190,
+ "minLength": 1,
+ "example": "Always firing"
+ },
+ "uid": {
+ "type": "string"
+ },
+ "updated": {
+ "type": "string",
+ "format": "date-time",
+ "readOnly": true
+ }
+ }
+ },
"PushoverConfig": {
"type": "object",
"properties": {
@@ -5138,7 +5289,6 @@
}
},
"alertGroup": {
- "description": "AlertGroup alert group",
"type": "object",
"required": [
"alerts",
@@ -5163,6 +5313,7 @@
"$ref": "#/definitions/alertGroup"
},
"alertGroups": {
+ "description": "AlertGroups alert groups",
"type": "array",
"items": {
"$ref": "#/definitions/alertGroup"
@@ -5328,6 +5479,7 @@
"$ref": "#/definitions/gettableAlert"
},
"gettableAlerts": {
+ "description": "GettableAlerts gettable alerts",
"type": "array",
"items": {
"$ref": "#/definitions/gettableAlert"
@@ -5536,7 +5688,6 @@
"$ref": "#/definitions/postableSilence"
},
"receiver": {
- "description": "Receiver receiver",
"type": "object",
"required": [
"name"
diff --git a/pkg/services/ngalert/eval/eval.go b/pkg/services/ngalert/eval/eval.go
index c98abf8ff09..24f261b8b9b 100644
--- a/pkg/services/ngalert/eval/eval.go
+++ b/pkg/services/ngalert/eval/eval.go
@@ -28,9 +28,9 @@ import (
//go:generate mockery --name Evaluator --structname FakeEvaluator --inpackage --filename evaluator_mock.go --with-expecter
type Evaluator interface {
// ConditionEval executes conditions and evaluates the result.
- ConditionEval(condition *models.Condition, now time.Time) (Results, error)
+ ConditionEval(ctx context.Context, condition models.Condition, now time.Time) Results
// QueriesAndExpressionsEval executes queries and expressions and returns the result.
- QueriesAndExpressionsEval(orgID int64, data []models.AlertQuery, now time.Time) (*backend.QueryDataResponse, error)
+ QueriesAndExpressionsEval(ctx context.Context, orgID int64, data []models.AlertQuery, now time.Time) (*backend.QueryDataResponse, error)
}
type evaluatorImpl struct {
@@ -89,6 +89,15 @@ type ExecutionResults struct {
// Results is a slice of evaluated alert instances states.
type Results []Result
+func (evalResults Results) HasErrors() bool {
+ for _, r := range evalResults {
+ if r.State == Error {
+ return true
+ }
+ }
+ return false
+}
+
// Result contains the evaluated State of an alert instance
// identified by its labels.
type Result struct {
@@ -153,8 +162,8 @@ type AlertExecCtx struct {
Ctx context.Context
}
-// GetExprRequest validates the condition, gets the datasource information and creates an expr.Request from it.
-func GetExprRequest(ctx AlertExecCtx, data []models.AlertQuery, now time.Time, dsCacheService datasources.CacheService, secretsService secrets.Service) (*expr.Request, error) {
+// getExprRequest validates the condition, gets the datasource information and creates an expr.Request from it.
+func getExprRequest(ctx AlertExecCtx, data []models.AlertQuery, now time.Time, dsCacheService datasources.CacheService, secretsService secrets.Service) (*expr.Request, error) {
req := &expr.Request{
OrgId: ctx.OrgID,
Headers: map[string]string{
@@ -166,8 +175,7 @@ func GetExprRequest(ctx AlertExecCtx, data []models.AlertQuery, now time.Time, d
datasources := make(map[string]*datasources.DataSource, len(data))
- for i := range data {
- q := data[i]
+ for _, q := range data {
model, err := q.GetModel()
if err != nil {
return nil, fmt.Errorf("failed to get query model: %w", err)
@@ -259,12 +267,7 @@ type NumberValueCapture struct {
Value *float64
}
-func executeCondition(ctx AlertExecCtx, c *models.Condition, now time.Time, exprService *expr.Service, dsCacheService datasources.CacheService, secretsService secrets.Service) ExecutionResults {
- execResp, err := executeQueriesAndExpressions(ctx, c.Data, now, exprService, dsCacheService, secretsService)
- if err != nil {
- return ExecutionResults{Error: err}
- }
-
+func queryDataResponseToExecutionResults(c models.Condition, execResp *backend.QueryDataResponse) ExecutionResults {
// eval captures for the '__value_string__' annotation and the Value property of the API response.
captures := make([]NumberValueCapture, 0, len(execResp.Responses))
captureVal := func(refID string, labels data.Labels, value *float64) {
@@ -356,7 +359,7 @@ func executeQueriesAndExpressions(ctx AlertExecCtx, data []models.AlertQuery, no
}
}()
- queryDataReq, err := GetExprRequest(ctx, data, now, dsCacheService, secretsService)
+ queryDataReq, err := getExprRequest(ctx, data, now, dsCacheService, secretsService)
if err != nil {
return nil, err
}
@@ -564,8 +567,6 @@ func (evalResults Results) AsDataFrame() data.Frame {
labelColumns = append(labelColumns, k)
}
- labelColumns = sort.StringSlice(labelColumns)
-
frame := data.NewFrame("evaluation results")
for _, lKey := range labelColumns {
frame.Fields = append(frame.Fields, data.NewField(lKey, nil, make([]string, fieldLen)))
@@ -591,21 +592,20 @@ func (evalResults Results) AsDataFrame() data.Frame {
}
// ConditionEval executes conditions and evaluates the result.
-func (e *evaluatorImpl) ConditionEval(condition *models.Condition, now time.Time) (Results, error) {
- alertCtx, cancelFn := context.WithTimeout(context.Background(), e.cfg.UnifiedAlerting.EvaluationTimeout)
- defer cancelFn()
-
- alertExecCtx := AlertExecCtx{OrgID: condition.OrgID, Ctx: alertCtx, ExpressionsEnabled: e.cfg.ExpressionsEnabled, Log: e.log}
-
- execResult := executeCondition(alertExecCtx, condition, now, e.expressionService, e.dataSourceCache, e.secretsService)
-
- evalResults := evaluateExecutionResult(execResult, now)
- return evalResults, nil
+func (e *evaluatorImpl) ConditionEval(ctx context.Context, condition models.Condition, now time.Time) Results {
+ execResp, err := e.QueriesAndExpressionsEval(ctx, condition.OrgID, condition.Data, now)
+ var execResults ExecutionResults
+ if err != nil {
+ execResults = ExecutionResults{Error: err}
+ } else {
+ execResults = queryDataResponseToExecutionResults(condition, execResp)
+ }
+ return evaluateExecutionResult(execResults, now)
}
// QueriesAndExpressionsEval executes queries and expressions and returns the result.
-func (e *evaluatorImpl) QueriesAndExpressionsEval(orgID int64, data []models.AlertQuery, now time.Time) (*backend.QueryDataResponse, error) {
- alertCtx, cancelFn := context.WithTimeout(context.Background(), e.cfg.UnifiedAlerting.EvaluationTimeout)
+func (e *evaluatorImpl) QueriesAndExpressionsEval(ctx context.Context, orgID int64, data []models.AlertQuery, now time.Time) (*backend.QueryDataResponse, error) {
+ alertCtx, cancelFn := context.WithTimeout(ctx, e.cfg.UnifiedAlerting.EvaluationTimeout)
defer cancelFn()
alertExecCtx := AlertExecCtx{OrgID: orgID, Ctx: alertCtx, ExpressionsEnabled: e.cfg.ExpressionsEnabled, Log: e.log}
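Call sites change shape to match (this mirrors the api_testing.go hunk above): pass the request context in, and inspect the results for error states instead of checking a second return value.

results := srv.evaluator.ConditionEval(c.Req.Context(), evalCond, now)
if results.HasErrors() {
	// at least one evaluated instance came back in the Error state
}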
diff --git a/pkg/services/ngalert/eval/evaluator_mock.go b/pkg/services/ngalert/eval/evaluator_mock.go
index ed3f46b7f2a..6c80a3e5818 100644
--- a/pkg/services/ngalert/eval/evaluator_mock.go
+++ b/pkg/services/ngalert/eval/evaluator_mock.go
@@ -3,6 +3,8 @@
package eval
import (
+ "context"
+
backend "github.com/grafana/grafana-plugin-sdk-go/backend"
mock "github.com/stretchr/testify/mock"
@@ -25,11 +27,11 @@ func (_m *FakeEvaluator) EXPECT() *FakeEvaluator_Expecter {
}
// ConditionEval provides a mock function with given fields: condition, now
-func (_m *FakeEvaluator) ConditionEval(condition *models.Condition, now time.Time) (Results, error) {
+func (_m *FakeEvaluator) ConditionEval(ctx context.Context, condition models.Condition, now time.Time) Results {
ret := _m.Called(condition, now)
var r0 Results
- if rf, ok := ret.Get(0).(func(*models.Condition, time.Time) Results); ok {
+ if rf, ok := ret.Get(0).(func(models.Condition, time.Time) Results); ok {
r0 = rf(condition, now)
} else {
if ret.Get(0) != nil {
@@ -37,14 +39,7 @@ func (_m *FakeEvaluator) ConditionEval(condition *models.Condition, now time.Tim
}
}
- var r1 error
- if rf, ok := ret.Get(1).(func(*models.Condition, time.Time) error); ok {
- r1 = rf(condition, now)
- } else {
- r1 = ret.Error(1)
- }
-
- return r0, r1
+ return r0
}
// FakeEvaluator_ConditionEval_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'ConditionEval'
@@ -53,26 +48,26 @@ type FakeEvaluator_ConditionEval_Call struct {
}
// ConditionEval is a helper method to define mock.On call
-// - condition *models.Condition
+// - condition models.Condition
// - now time.Time
func (_e *FakeEvaluator_Expecter) ConditionEval(condition interface{}, now interface{}) *FakeEvaluator_ConditionEval_Call {
return &FakeEvaluator_ConditionEval_Call{Call: _e.mock.On("ConditionEval", condition, now)}
}
-func (_c *FakeEvaluator_ConditionEval_Call) Run(run func(condition *models.Condition, now time.Time)) *FakeEvaluator_ConditionEval_Call {
+func (_c *FakeEvaluator_ConditionEval_Call) Run(run func(condition models.Condition, now time.Time)) *FakeEvaluator_ConditionEval_Call {
_c.Call.Run(func(args mock.Arguments) {
- run(args[0].(*models.Condition), args[1].(time.Time))
+ run(args[0].(models.Condition), args[1].(time.Time))
})
return _c
}
-func (_c *FakeEvaluator_ConditionEval_Call) Return(_a0 Results, _a1 error) *FakeEvaluator_ConditionEval_Call {
- _c.Call.Return(_a0, _a1)
+func (_c *FakeEvaluator_ConditionEval_Call) Return(_a0 Results) *FakeEvaluator_ConditionEval_Call {
+ _c.Call.Return(_a0)
return _c
}
// QueriesAndExpressionsEval provides a mock function with given fields: orgID, data, now
-func (_m *FakeEvaluator) QueriesAndExpressionsEval(orgID int64, data []models.AlertQuery, now time.Time) (*backend.QueryDataResponse, error) {
+func (_m *FakeEvaluator) QueriesAndExpressionsEval(ctx context.Context, orgID int64, data []models.AlertQuery, now time.Time) (*backend.QueryDataResponse, error) {
ret := _m.Called(orgID, data, now)
var r0 *backend.QueryDataResponse
diff --git a/pkg/services/ngalert/models/alert_rule.go b/pkg/services/ngalert/models/alert_rule.go
index 92d02b1a0e6..4363c3d0475 100644
--- a/pkg/services/ngalert/models/alert_rule.go
+++ b/pkg/services/ngalert/models/alert_rule.go
@@ -23,6 +23,7 @@ var (
ErrRuleGroupNamespaceNotFound = errors.New("rule group not found under this namespace")
ErrAlertRuleFailedValidation = errors.New("invalid alert rule")
ErrAlertRuleUniqueConstraintViolation = errors.New("a conflicting alert rule is found: rule title under the same organisation and folder should be unique")
+ ErrQuotaReached = errors.New("quota has been exceeded")
)
// swagger:enum NoDataState
@@ -170,6 +171,14 @@ func (alertRule *AlertRule) GetLabels(opts ...LabelOption) map[string]string {
return labels
}
+func (alertRule *AlertRule) GetEvalCondition() Condition {
+ return Condition{
+ Condition: alertRule.Condition,
+ OrgID: alertRule.OrgID,
+ Data: alertRule.Data,
+ }
+}
+
// Diff calculates diff between two alert rules. Returns nil if two rules are equal. Otherwise, returns cmputil.DiffReport
func (alertRule *AlertRule) Diff(rule *AlertRule, ignore ...string) cmputil.DiffReport {
var reporter cmputil.DiffReporter
diff --git a/pkg/services/ngalert/ngalert.go b/pkg/services/ngalert/ngalert.go
index fa95a519bbb..5676e8568db 100644
--- a/pkg/services/ngalert/ngalert.go
+++ b/pkg/services/ngalert/ngalert.go
@@ -2,6 +2,7 @@ package ngalert
import (
"context"
+ "fmt"
"net/url"
"github.com/benbjohnson/clock"
@@ -23,6 +24,7 @@ import (
"github.com/grafana/grafana/pkg/services/ngalert/notifier"
"github.com/grafana/grafana/pkg/services/ngalert/provisioning"
"github.com/grafana/grafana/pkg/services/ngalert/schedule"
+ "github.com/grafana/grafana/pkg/services/ngalert/sender"
"github.com/grafana/grafana/pkg/services/ngalert/state"
"github.com/grafana/grafana/pkg/services/ngalert/store"
"github.com/grafana/grafana/pkg/services/notifications"
@@ -92,6 +94,7 @@ type AlertNG struct {
// Alerting notification services
MultiOrgAlertmanager *notifier.MultiOrgAlertmanager
+ AlertsRouter *sender.AlertsRouter
accesscontrol accesscontrol.AccessControl
bus bus.Bus
@@ -125,24 +128,7 @@ func (ng *AlertNG) init() error {
// Let's make sure we're able to complete an initial sync of Alertmanagers before we start the alerting components.
if err := ng.MultiOrgAlertmanager.LoadAndSyncAlertmanagersForOrgs(context.Background()); err != nil {
- return err
- }
-
- schedCfg := schedule.SchedulerCfg{
- C: clock.New(),
- BaseInterval: ng.Cfg.UnifiedAlerting.BaseInterval,
- Logger: ng.Log,
- MaxAttempts: ng.Cfg.UnifiedAlerting.MaxAttempts,
- Evaluator: eval.NewEvaluator(ng.Cfg, ng.Log, ng.DataSourceCache, ng.SecretsService, ng.ExpressionService),
- InstanceStore: store,
- RuleStore: store,
- AdminConfigStore: store,
- OrgStore: store,
- MultiOrgNotifier: ng.MultiOrgAlertmanager,
- Metrics: ng.Metrics.GetSchedulerMetrics(),
- AdminConfigPollInterval: ng.Cfg.UnifiedAlerting.AdminConfigPollInterval,
- DisabledOrgs: ng.Cfg.UnifiedAlerting.DisabledOrgs,
- MinRuleInterval: ng.Cfg.UnifiedAlerting.MinInterval,
+ return fmt.Errorf("failed to initialize alerting because the multi-org Alertmanager failed to warm up: %w", err)
}
appUrl, err := url.Parse(ng.Cfg.AppURL)
@@ -151,7 +137,29 @@ func (ng *AlertNG) init() error {
appUrl = nil
}
- stateManager := state.NewManager(ng.Log, ng.Metrics.GetStateMetrics(), appUrl, store, store, ng.dashboardService, ng.imageService, clock.New())
+ clk := clock.New()
+
+ alertsRouter := sender.NewAlertsRouter(ng.MultiOrgAlertmanager, store, clk, appUrl, ng.Cfg.UnifiedAlerting.DisabledOrgs, ng.Cfg.UnifiedAlerting.AdminConfigPollInterval)
+
+ // Make sure we sync at least once as Grafana starts to get the router up and running before we start sending any alerts.
+ if err := alertsRouter.SyncAndApplyConfigFromDatabase(); err != nil {
+ return fmt.Errorf("failed to initialize alerting because alert notifications router failed to warm up: %w", err)
+ }
+
+ ng.AlertsRouter = alertsRouter
+
+ schedCfg := schedule.SchedulerCfg{
+ Cfg: ng.Cfg.UnifiedAlerting,
+ C: clk,
+ Logger: ng.Log,
+ Evaluator: eval.NewEvaluator(ng.Cfg, ng.Log, ng.DataSourceCache, ng.SecretsService, ng.ExpressionService),
+ InstanceStore: store,
+ RuleStore: store,
+ Metrics: ng.Metrics.GetSchedulerMetrics(),
+ AlertSender: alertsRouter,
+ }
+
+ stateManager := state.NewManager(ng.Log, ng.Metrics.GetStateMetrics(), appUrl, store, store, ng.dashboardService, ng.imageService, clk)
scheduler := schedule.NewScheduler(schedCfg, appUrl, stateManager, ng.bus)
ng.stateManager = stateManager
@@ -162,7 +170,7 @@ func (ng *AlertNG) init() error {
contactPointService := provisioning.NewContactPointService(store, ng.SecretsService, store, store, ng.Log)
templateService := provisioning.NewTemplateService(store, store, store, ng.Log)
muteTimingService := provisioning.NewMuteTimingService(store, store, store, ng.Log)
- alertRuleService := provisioning.NewAlertRuleService(store, store, store,
+ alertRuleService := provisioning.NewAlertRuleService(store, store, ng.QuotaService, store,
int64(ng.Cfg.UnifiedAlerting.DefaultRuleEvaluationInterval.Seconds()),
int64(ng.Cfg.UnifiedAlerting.BaseInterval.Seconds()), ng.Log)
@@ -189,6 +197,7 @@ func (ng *AlertNG) init() error {
Templates: templateService,
MuteTimings: muteTimingService,
AlertRules: alertRuleService,
+ AlertsRouter: alertsRouter,
}
api.RegisterAPIEndpoints(ng.Metrics.GetAPIMetrics())
@@ -202,14 +211,18 @@ func (ng *AlertNG) Run(ctx context.Context) error {
children, subCtx := errgroup.WithContext(ctx)
+ children.Go(func() error {
+ return ng.MultiOrgAlertmanager.Run(subCtx)
+ })
+ children.Go(func() error {
+ return ng.AlertsRouter.Run(subCtx)
+ })
+
if ng.Cfg.UnifiedAlerting.ExecuteAlerts {
children.Go(func() error {
return ng.schedule.Run(subCtx)
})
}
- children.Go(func() error {
- return ng.MultiOrgAlertmanager.Run(subCtx)
- })
return children.Wait()
}
diff --git a/pkg/services/ngalert/provisioning/alert_rules.go b/pkg/services/ngalert/provisioning/alert_rules.go
index 5d618317eda..a87d9c223bc 100644
--- a/pkg/services/ngalert/provisioning/alert_rules.go
+++ b/pkg/services/ngalert/provisioning/alert_rules.go
@@ -10,6 +10,7 @@ import (
"github.com/grafana/grafana/pkg/services/ngalert/api/tooling/definitions"
"github.com/grafana/grafana/pkg/services/ngalert/models"
"github.com/grafana/grafana/pkg/services/ngalert/store"
+ "github.com/grafana/grafana/pkg/services/quota"
"github.com/grafana/grafana/pkg/util"
)
@@ -18,12 +19,14 @@ type AlertRuleService struct {
baseIntervalSeconds int64
ruleStore RuleStore
provenanceStore ProvisioningStore
+ quotas QuotaChecker
xact TransactionManager
log log.Logger
}
func NewAlertRuleService(ruleStore RuleStore,
provenanceStore ProvisioningStore,
+ quotas QuotaChecker,
xact TransactionManager,
defaultIntervalSeconds int64,
baseIntervalSeconds int64,
@@ -33,6 +36,7 @@ func NewAlertRuleService(ruleStore RuleStore,
baseIntervalSeconds: baseIntervalSeconds,
ruleStore: ruleStore,
provenanceStore: provenanceStore,
+ quotas: quotas,
xact: xact,
log: log,
}
@@ -57,7 +61,7 @@ func (service *AlertRuleService) GetAlertRule(ctx context.Context, orgID int64,
// CreateAlertRule creates a new alert rule. This function will ignore any
// interval that is set in the rule struct and use the already existing group
// interval or the default one.
-func (service *AlertRuleService) CreateAlertRule(ctx context.Context, rule models.AlertRule, provenance models.Provenance) (models.AlertRule, error) {
+func (service *AlertRuleService) CreateAlertRule(ctx context.Context, rule models.AlertRule, provenance models.Provenance, userID int64) (models.AlertRule, error) {
if rule.UID == "" {
rule.UID = util.GenerateShortUID()
}
@@ -82,6 +86,18 @@ func (service *AlertRuleService) CreateAlertRule(ctx context.Context, rule model
} else {
return errors.New("couldn't find newly created id")
}
+
+ limitReached, err := service.quotas.CheckQuotaReached(ctx, "alert_rule", &quota.ScopeParameters{
+ OrgId: rule.OrgID,
+ UserId: userID,
+ })
+ if err != nil {
+ return fmt.Errorf("failed to check alert rule quota: %w", err)
+ }
+ if limitReached {
+ return models.ErrQuotaReached
+ }
+
return service.provenanceStore.SetProvenance(ctx, &rule, rule.OrgID, provenance)
})
if err != nil {
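One consequence worth flagging: the quota check runs inside the transaction, after the insert, so a reached limit rolls the new rule back, and the API layer maps the sentinel error to a 403, as in the RoutePostAlertRule hunk earlier. Roughly:

_, err := srv.alertRules.CreateAlertRule(c.Req.Context(), ar.UpstreamModel(), alerting_models.ProvenanceAPI, c.UserId)
if errors.Is(err, alerting_models.ErrQuotaReached) {
	return ErrResp(http.StatusForbidden, err, "")
}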
diff --git a/pkg/services/ngalert/provisioning/alert_rules_test.go b/pkg/services/ngalert/provisioning/alert_rules_test.go
index 8bfe5cd9f71..f020f5b68d8 100644
--- a/pkg/services/ngalert/provisioning/alert_rules_test.go
+++ b/pkg/services/ngalert/provisioning/alert_rules_test.go
@@ -15,26 +15,29 @@ import (
func TestAlertRuleService(t *testing.T) {
ruleService := createAlertRuleService(t)
+
t.Run("alert rule creation should return the created id", func(t *testing.T) {
var orgID int64 = 1
- rule, err := ruleService.CreateAlertRule(context.Background(), dummyRule("test#1", orgID), models.ProvenanceNone)
+ rule, err := ruleService.CreateAlertRule(context.Background(), dummyRule("test#1", orgID), models.ProvenanceNone, 0)
require.NoError(t, err)
require.NotEqual(t, 0, rule.ID, "expected to get the created id and not the zero value")
})
+
t.Run("alert rule creation should set the right provenance", func(t *testing.T) {
var orgID int64 = 1
- rule, err := ruleService.CreateAlertRule(context.Background(), dummyRule("test#2", orgID), models.ProvenanceAPI)
+ rule, err := ruleService.CreateAlertRule(context.Background(), dummyRule("test#2", orgID), models.ProvenanceAPI, 0)
require.NoError(t, err)
_, provenance, err := ruleService.GetAlertRule(context.Background(), orgID, rule.UID)
require.NoError(t, err)
require.Equal(t, models.ProvenanceAPI, provenance)
})
+
t.Run("alert rule group should be updated correctly", func(t *testing.T) {
var orgID int64 = 1
rule := dummyRule("test#3", orgID)
rule.RuleGroup = "a"
- rule, err := ruleService.CreateAlertRule(context.Background(), rule, models.ProvenanceNone)
+ rule, err := ruleService.CreateAlertRule(context.Background(), rule, models.ProvenanceNone, 0)
require.NoError(t, err)
require.Equal(t, int64(60), rule.IntervalSeconds)
@@ -46,11 +49,12 @@ func TestAlertRuleService(t *testing.T) {
require.NoError(t, err)
require.Equal(t, interval, rule.IntervalSeconds)
})
+
t.Run("alert rule should get interval from existing rule group", func(t *testing.T) {
var orgID int64 = 1
rule := dummyRule("test#4", orgID)
rule.RuleGroup = "b"
- rule, err := ruleService.CreateAlertRule(context.Background(), rule, models.ProvenanceNone)
+ rule, err := ruleService.CreateAlertRule(context.Background(), rule, models.ProvenanceNone, 0)
require.NoError(t, err)
var interval int64 = 120
@@ -59,10 +63,11 @@ func TestAlertRuleService(t *testing.T) {
rule = dummyRule("test#4-1", orgID)
rule.RuleGroup = "b"
- rule, err = ruleService.CreateAlertRule(context.Background(), rule, models.ProvenanceNone)
+ rule, err = ruleService.CreateAlertRule(context.Background(), rule, models.ProvenanceNone, 0)
require.NoError(t, err)
require.Equal(t, interval, rule.IntervalSeconds)
})
+
t.Run("updating a rule group should bump the version number", func(t *testing.T) {
const (
orgID = 123
@@ -75,7 +80,7 @@ func TestAlertRuleService(t *testing.T) {
rule.UID = ruleUID
rule.RuleGroup = ruleGroup
rule.NamespaceUID = namespaceUID
- _, err := ruleService.CreateAlertRule(context.Background(), rule, models.ProvenanceNone)
+ _, err := ruleService.CreateAlertRule(context.Background(), rule, models.ProvenanceNone, 0)
require.NoError(t, err)
rule, _, err = ruleService.GetAlertRule(context.Background(), orgID, ruleUID)
@@ -91,6 +96,7 @@ func TestAlertRuleService(t *testing.T) {
require.Equal(t, int64(2), rule.Version)
require.Equal(t, newInterval, rule.IntervalSeconds)
})
+
t.Run("alert rule provenace should be correctly checked", func(t *testing.T) {
tests := []struct {
name string
@@ -139,7 +145,7 @@ func TestAlertRuleService(t *testing.T) {
t.Run(test.name, func(t *testing.T) {
var orgID int64 = 1
rule := dummyRule(t.Name(), orgID)
- rule, err := ruleService.CreateAlertRule(context.Background(), rule, test.from)
+ rule, err := ruleService.CreateAlertRule(context.Background(), rule, test.from, 0)
require.NoError(t, err)
_, err = ruleService.UpdateAlertRule(context.Background(), rule, test.to)
@@ -151,6 +157,17 @@ func TestAlertRuleService(t *testing.T) {
})
}
})
+
+ t.Run("quota met causes create to be rejected", func(t *testing.T) {
+ ruleService := createAlertRuleService(t)
+ checker := &MockQuotaChecker{}
+ checker.EXPECT().LimitExceeded()
+ ruleService.quotas = checker
+
+ _, err := ruleService.CreateAlertRule(context.Background(), dummyRule("test#1", 1), models.ProvenanceNone, 0)
+
+ require.ErrorIs(t, err, models.ErrQuotaReached)
+ })
}
func createAlertRuleService(t *testing.T) AlertRuleService {
@@ -160,9 +177,12 @@ func createAlertRuleService(t *testing.T) AlertRuleService {
SQLStore: sqlStore,
BaseInterval: time.Second * 10,
}
+ quotas := MockQuotaChecker{}
+ quotas.EXPECT().LimitOK()
return AlertRuleService{
ruleStore: store,
provenanceStore: store,
+ quotas: &quotas,
xact: sqlStore,
log: log.New("testing"),
baseIntervalSeconds: 10,
diff --git a/pkg/services/ngalert/provisioning/contactpoints.go b/pkg/services/ngalert/provisioning/contactpoints.go
index 879666d5f4c..e948eb649d5 100644
--- a/pkg/services/ngalert/provisioning/contactpoints.go
+++ b/pkg/services/ngalert/provisioning/contactpoints.go
@@ -34,17 +34,27 @@ func NewContactPointService(store AMConfigStore, encryptionService secrets.Servi
}
}
-func (ecp *ContactPointService) GetContactPoints(ctx context.Context, orgID int64) ([]apimodels.EmbeddedContactPoint, error) {
- revision, err := getLastConfiguration(ctx, orgID, ecp.amStore)
+type ContactPointQuery struct {
+ // Optionally filter by name.
+ Name string
+ OrgID int64
+}
+
+func (ecp *ContactPointService) GetContactPoints(ctx context.Context, q ContactPointQuery) ([]apimodels.EmbeddedContactPoint, error) {
+ revision, err := getLastConfiguration(ctx, q.OrgID, ecp.amStore)
if err != nil {
return nil, err
}
- provenances, err := ecp.provenanceStore.GetProvenances(ctx, orgID, "contactPoint")
+ provenances, err := ecp.provenanceStore.GetProvenances(ctx, q.OrgID, "contactPoint")
if err != nil {
return nil, err
}
contactPoints := []apimodels.EmbeddedContactPoint{}
for _, contactPoint := range revision.cfg.GetGrafanaReceiverMap() {
+ if q.Name != "" && contactPoint.Name != q.Name {
+ continue
+ }
+
embeddedContactPoint := apimodels.EmbeddedContactPoint{
UID: contactPoint.UID,
Type: contactPoint.Type,
@@ -66,6 +76,7 @@ func (ecp *ContactPointService) GetContactPoints(ctx context.Context, orgID int64
}
embeddedContactPoint.Settings.Set(k, apimodels.RedactedValue)
}
+
contactPoints = append(contactPoints, embeddedContactPoint)
}
sort.SliceStable(contactPoints, func(i, j int) bool {
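GetContactPoints now takes a query struct instead of a bare org ID, so later filters can be added without another signature change. A usage sketch (ctx and ecp are assumed to be in scope; the name matches the test fixture above):

    // All contact points for org 1:
    all, err := ecp.GetContactPoints(ctx, ContactPointQuery{OrgID: 1})

    // Only the contact point named "email receiver":
    one, err := ecp.GetContactPoints(ctx, ContactPointQuery{
        OrgID: 1,
        Name:  "email receiver",
    })
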
diff --git a/pkg/services/ngalert/provisioning/contactpoints_test.go b/pkg/services/ngalert/provisioning/contactpoints_test.go
index 17a24735a34..1ee91f59b1b 100644
--- a/pkg/services/ngalert/provisioning/contactpoints_test.go
+++ b/pkg/services/ngalert/provisioning/contactpoints_test.go
@@ -22,7 +22,24 @@ func TestContactPointService(t *testing.T) {
t.Run("service gets contact points from AM config", func(t *testing.T) {
sut := createContactPointServiceSut(secretsService)
- cps, err := sut.GetContactPoints(context.Background(), 1)
+ cps, err := sut.GetContactPoints(context.Background(), cpsQuery(1))
+ require.NoError(t, err)
+
+ require.Len(t, cps, 1)
+ require.Equal(t, "email receiver", cps[0].Name)
+ })
+
+ t.Run("service filters contact points by name", func(t *testing.T) {
+ sut := createContactPointServiceSut(secretsService)
+ newCp := createTestContactPoint()
+ _, err := sut.CreateContactPoint(context.Background(), 1, newCp, models.ProvenanceAPI)
+ require.NoError(t, err)
+
+ q := ContactPointQuery{
+ OrgID: 1,
+ Name: "email receiver",
+ }
+ cps, err := sut.GetContactPoints(context.Background(), q)
require.NoError(t, err)
require.Len(t, cps, 1)
@@ -36,7 +53,7 @@ func TestContactPointService(t *testing.T) {
_, err := sut.CreateContactPoint(context.Background(), 1, newCp, models.ProvenanceAPI)
require.NoError(t, err)
- cps, err := sut.GetContactPoints(context.Background(), 1)
+ cps, err := sut.GetContactPoints(context.Background(), cpsQuery(1))
require.NoError(t, err)
require.Len(t, cps, 2)
require.Equal(t, "test-contact-point", cps[1].Name)
@@ -52,7 +69,7 @@ func TestContactPointService(t *testing.T) {
_, err := sut.CreateContactPoint(context.Background(), 1, newCp, models.ProvenanceAPI)
require.NoError(t, err)
- cps, err := sut.GetContactPoints(context.Background(), 1)
+ cps, err := sut.GetContactPoints(context.Background(), cpsQuery(1))
require.NoError(t, err)
require.Len(t, cps, 2)
require.Equal(t, customUID, cps[1].UID)
@@ -120,7 +137,7 @@ func TestContactPointService(t *testing.T) {
t.Run("default provenance of contact points is none", func(t *testing.T) {
sut := createContactPointServiceSut(secretsService)
- cps, err := sut.GetContactPoints(context.Background(), 1)
+ cps, err := sut.GetContactPoints(context.Background(), cpsQuery(1))
require.NoError(t, err)
require.Equal(t, models.ProvenanceNone, models.Provenance(cps[0].Provenance))
@@ -133,7 +150,7 @@ func TestContactPointService(t *testing.T) {
newCp, err := sut.CreateContactPoint(context.Background(), 1, newCp, models.ProvenanceNone)
require.NoError(t, err)
- cps, err := sut.GetContactPoints(context.Background(), 1)
+ cps, err := sut.GetContactPoints(context.Background(), cpsQuery(1))
require.NoError(t, err)
require.Equal(t, newCp.UID, cps[1].UID)
require.Equal(t, models.ProvenanceNone, models.Provenance(cps[1].Provenance))
@@ -141,7 +158,7 @@ func TestContactPointService(t *testing.T) {
err = sut.UpdateContactPoint(context.Background(), 1, newCp, models.ProvenanceAPI)
require.NoError(t, err)
- cps, err = sut.GetContactPoints(context.Background(), 1)
+ cps, err = sut.GetContactPoints(context.Background(), cpsQuery(1))
require.NoError(t, err)
require.Equal(t, newCp.UID, cps[1].UID)
require.Equal(t, models.ProvenanceAPI, models.Provenance(cps[1].Provenance))
@@ -154,7 +171,7 @@ func TestContactPointService(t *testing.T) {
newCp, err := sut.CreateContactPoint(context.Background(), 1, newCp, models.ProvenanceNone)
require.NoError(t, err)
- cps, err := sut.GetContactPoints(context.Background(), 1)
+ cps, err := sut.GetContactPoints(context.Background(), cpsQuery(1))
require.NoError(t, err)
require.Equal(t, newCp.UID, cps[1].UID)
require.Equal(t, models.ProvenanceNone, models.Provenance(cps[1].Provenance))
@@ -162,7 +179,7 @@ func TestContactPointService(t *testing.T) {
err = sut.UpdateContactPoint(context.Background(), 1, newCp, models.ProvenanceFile)
require.NoError(t, err)
- cps, err = sut.GetContactPoints(context.Background(), 1)
+ cps, err = sut.GetContactPoints(context.Background(), cpsQuery(1))
require.NoError(t, err)
require.Equal(t, newCp.UID, cps[1].UID)
require.Equal(t, models.ProvenanceFile, models.Provenance(cps[1].Provenance))
@@ -175,7 +192,7 @@ func TestContactPointService(t *testing.T) {
newCp, err := sut.CreateContactPoint(context.Background(), 1, newCp, models.ProvenanceFile)
require.NoError(t, err)
- cps, err := sut.GetContactPoints(context.Background(), 1)
+ cps, err := sut.GetContactPoints(context.Background(), cpsQuery(1))
require.NoError(t, err)
require.Equal(t, newCp.UID, cps[1].UID)
require.Equal(t, models.ProvenanceFile, models.Provenance(cps[1].Provenance))
@@ -191,7 +208,7 @@ func TestContactPointService(t *testing.T) {
newCp, err := sut.CreateContactPoint(context.Background(), 1, newCp, models.ProvenanceAPI)
require.NoError(t, err)
- cps, err := sut.GetContactPoints(context.Background(), 1)
+ cps, err := sut.GetContactPoints(context.Background(), cpsQuery(1))
require.NoError(t, err)
require.Equal(t, newCp.UID, cps[1].UID)
require.Equal(t, models.ProvenanceAPI, models.Provenance(cps[1].Provenance))
@@ -269,6 +286,12 @@ func createTestContactPoint() definitions.EmbeddedContactPoint {
}
}
+func cpsQuery(orgID int64) ContactPointQuery {
+ return ContactPointQuery{
+ OrgID: orgID,
+ }
+}
+
func TestStitchReceivers(t *testing.T) {
type testCase struct {
name string
diff --git a/pkg/services/ngalert/provisioning/notification_policies_test.go b/pkg/services/ngalert/provisioning/notification_policies_test.go
index da7211791e4..27b0de8b62a 100644
--- a/pkg/services/ngalert/provisioning/notification_policies_test.go
+++ b/pkg/services/ngalert/provisioning/notification_policies_test.go
@@ -223,7 +223,7 @@ func TestNotificationPolicyService(t *testing.T) {
require.NoError(t, err)
require.Equal(t, "grafana-default-email", tree.Receiver)
require.Nil(t, tree.Routes)
- require.Nil(t, tree.GroupBy)
+ require.Equal(t, []model.LabelName{models.FolderTitleLabel, model.AlertNameLabel}, tree.GroupBy)
})
}
diff --git a/pkg/services/ngalert/provisioning/persist.go b/pkg/services/ngalert/provisioning/persist.go
index da940e39989..ed99265b7de 100644
--- a/pkg/services/ngalert/provisioning/persist.go
+++ b/pkg/services/ngalert/provisioning/persist.go
@@ -5,6 +5,7 @@ import (
"github.com/grafana/grafana/pkg/services/ngalert/models"
"github.com/grafana/grafana/pkg/services/ngalert/store"
+ "github.com/grafana/grafana/pkg/services/quota"
)
// AMStore is a store of Alertmanager configurations.
@@ -37,3 +38,9 @@ type RuleStore interface {
UpdateAlertRules(ctx context.Context, rule []store.UpdateRule) error
DeleteAlertRulesByUID(ctx context.Context, orgID int64, ruleUID ...string) error
}
+
+// QuotaChecker represents the ability to evaluate whether quotas are met.
+//go:generate mockery --name QuotaChecker --structname MockQuotaChecker --inpackage --filename quota_checker_mock.go --with-expecter
+type QuotaChecker interface {
+ CheckQuotaReached(ctx context.Context, target string, scopeParams *quota.ScopeParameters) (bool, error)
+}
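The interface deliberately mirrors a single method on the quota service (hence the `quota.ScopeParameters` import), so production code passes the service straight in while tests substitute the generated mock below. Where the mock is overkill, a hand-rolled stub works just as well; a sketch (the type name is illustrative):

    import (
        "context"

        "github.com/grafana/grafana/pkg/services/quota"
    )

    // quotaAlwaysFull is a hand-rolled stub satisfying QuotaChecker.
    type quotaAlwaysFull struct{}

    func (quotaAlwaysFull) CheckQuotaReached(_ context.Context, _ string, _ *quota.ScopeParameters) (bool, error) {
        return true, nil // report every target as over quota
    }
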
diff --git a/pkg/services/ngalert/provisioning/quota_checker_mock.go b/pkg/services/ngalert/provisioning/quota_checker_mock.go
new file mode 100644
index 00000000000..0d52a5bf207
--- /dev/null
+++ b/pkg/services/ngalert/provisioning/quota_checker_mock.go
@@ -0,0 +1,81 @@
+// Code generated by mockery v2.12.0. DO NOT EDIT.
+
+package provisioning
+
+import (
+ context "context"
+
+ quota "github.com/grafana/grafana/pkg/services/quota"
+ mock "github.com/stretchr/testify/mock"
+
+ testing "testing"
+)
+
+// MockQuotaChecker is an autogenerated mock type for the QuotaChecker type
+type MockQuotaChecker struct {
+ mock.Mock
+}
+
+type MockQuotaChecker_Expecter struct {
+ mock *mock.Mock
+}
+
+func (_m *MockQuotaChecker) EXPECT() *MockQuotaChecker_Expecter {
+ return &MockQuotaChecker_Expecter{mock: &_m.Mock}
+}
+
+// CheckQuotaReached provides a mock function with given fields: ctx, target, scopeParams
+func (_m *MockQuotaChecker) CheckQuotaReached(ctx context.Context, target string, scopeParams *quota.ScopeParameters) (bool, error) {
+ ret := _m.Called(ctx, target, scopeParams)
+
+ var r0 bool
+ if rf, ok := ret.Get(0).(func(context.Context, string, *quota.ScopeParameters) bool); ok {
+ r0 = rf(ctx, target, scopeParams)
+ } else {
+ r0 = ret.Get(0).(bool)
+ }
+
+ var r1 error
+ if rf, ok := ret.Get(1).(func(context.Context, string, *quota.ScopeParameters) error); ok {
+ r1 = rf(ctx, target, scopeParams)
+ } else {
+ r1 = ret.Error(1)
+ }
+
+ return r0, r1
+}
+
+// MockQuotaChecker_CheckQuotaReached_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'CheckQuotaReached'
+type MockQuotaChecker_CheckQuotaReached_Call struct {
+ *mock.Call
+}
+
+// CheckQuotaReached is a helper method to define mock.On call
+// - ctx context.Context
+// - target string
+// - scopeParams *quota.ScopeParameters
+func (_e *MockQuotaChecker_Expecter) CheckQuotaReached(ctx interface{}, target interface{}, scopeParams interface{}) *MockQuotaChecker_CheckQuotaReached_Call {
+ return &MockQuotaChecker_CheckQuotaReached_Call{Call: _e.mock.On("CheckQuotaReached", ctx, target, scopeParams)}
+}
+
+func (_c *MockQuotaChecker_CheckQuotaReached_Call) Run(run func(ctx context.Context, target string, scopeParams *quota.ScopeParameters)) *MockQuotaChecker_CheckQuotaReached_Call {
+ _c.Call.Run(func(args mock.Arguments) {
+ run(args[0].(context.Context), args[1].(string), args[2].(*quota.ScopeParameters))
+ })
+ return _c
+}
+
+func (_c *MockQuotaChecker_CheckQuotaReached_Call) Return(_a0 bool, _a1 error) *MockQuotaChecker_CheckQuotaReached_Call {
+ _c.Call.Return(_a0, _a1)
+ return _c
+}
+
+// NewMockQuotaChecker creates a new instance of MockQuotaChecker. It also registers the testing.TB interface on the mock and a cleanup function to assert the mocks expectations.
+func NewMockQuotaChecker(t testing.TB) *MockQuotaChecker {
+ mock := &MockQuotaChecker{}
+ mock.Mock.Test(t)
+
+ t.Cleanup(func() { mock.AssertExpectations(t) })
+
+ return mock
+}
diff --git a/pkg/services/ngalert/provisioning/testing.go b/pkg/services/ngalert/provisioning/testing.go
index d303cfb1f25..28601edb95a 100644
--- a/pkg/services/ngalert/provisioning/testing.go
+++ b/pkg/services/ngalert/provisioning/testing.go
@@ -170,3 +170,13 @@ func (m *MockProvisioningStore_Expecter) SaveSucceeds() *MockProvisioningStore_E
m.DeleteProvenance(mock.Anything, mock.Anything, mock.Anything).Return(nil)
return m
}
+
+func (m *MockQuotaChecker_Expecter) LimitOK() *MockQuotaChecker_Expecter {
+ m.CheckQuotaReached(mock.Anything, mock.Anything, mock.Anything).Return(false, nil)
+ return m
+}
+
+func (m *MockQuotaChecker_Expecter) LimitExceeded() *MockQuotaChecker_Expecter {
+ m.CheckQuotaReached(mock.Anything, mock.Anything, mock.Anything).Return(true, nil)
+ return m
+}
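The two helpers wrap the generated expecter so tests state intent rather than mock mechanics, which is how the alert-rule tests above use them. The whole setup is one line per path:

    quotas := &MockQuotaChecker{}
    quotas.EXPECT().LimitOK()       // every CheckQuotaReached returns (false, nil)
    // or, to exercise the rejection path:
    quotas.EXPECT().LimitExceeded() // every CheckQuotaReached returns (true, nil)
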
diff --git a/pkg/services/ngalert/schedule/alerts_sender_mock.go b/pkg/services/ngalert/schedule/alerts_sender_mock.go
new file mode 100644
index 00000000000..04acf185a3e
--- /dev/null
+++ b/pkg/services/ngalert/schedule/alerts_sender_mock.go
@@ -0,0 +1,52 @@
+// Code generated by mockery v2.10.0. DO NOT EDIT.
+
+package schedule
+
+import (
+ definitions "github.com/grafana/grafana/pkg/services/ngalert/api/tooling/definitions"
+ mock "github.com/stretchr/testify/mock"
+
+ models "github.com/grafana/grafana/pkg/services/ngalert/models"
+)
+
+// AlertsSenderMock is an autogenerated mock type for the AlertsSender type
+type AlertsSenderMock struct {
+ mock.Mock
+}
+
+type AlertsSenderMock_Expecter struct {
+ mock *mock.Mock
+}
+
+func (_m *AlertsSenderMock) EXPECT() *AlertsSenderMock_Expecter {
+ return &AlertsSenderMock_Expecter{mock: &_m.Mock}
+}
+
+// Send provides a mock function with given fields: key, alerts
+func (_m *AlertsSenderMock) Send(key models.AlertRuleKey, alerts definitions.PostableAlerts) {
+ _m.Called(key, alerts)
+}
+
+// AlertsSenderMock_Send_Call is a *mock.Call that shadows Run/Return methods with type explicit version for method 'Send'
+type AlertsSenderMock_Send_Call struct {
+ *mock.Call
+}
+
+// Send is a helper method to define mock.On call
+// - key models.AlertRuleKey
+// - alerts definitions.PostableAlerts
+func (_e *AlertsSenderMock_Expecter) Send(key interface{}, alerts interface{}) *AlertsSenderMock_Send_Call {
+ return &AlertsSenderMock_Send_Call{Call: _e.mock.On("Send", key, alerts)}
+}
+
+func (_c *AlertsSenderMock_Send_Call) Run(run func(key models.AlertRuleKey, alerts definitions.PostableAlerts)) *AlertsSenderMock_Send_Call {
+ _c.Call.Run(func(args mock.Arguments) {
+ run(args[0].(models.AlertRuleKey), args[1].(definitions.PostableAlerts))
+ })
+ return _c
+}
+
+func (_c *AlertsSenderMock_Send_Call) Return() *AlertsSenderMock_Send_Call {
+ _c.Call.Return()
+ return _c
+}
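Because Send has no return value, tests typically register a catch-all expectation and then assert on the recorded calls afterwards. A sketch using testify's standard matchers (variable names are illustrative):

    sender := &AlertsSenderMock{}
    sender.EXPECT().Send(mock.Anything, mock.Anything).Return()

    // ... drive the scheduler under test ...

    sender.AssertNumberOfCalls(t, "Send", 1)
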
diff --git a/pkg/services/ngalert/schedule/schedule.go b/pkg/services/ngalert/schedule/schedule.go
index 9483cc4c72a..deb0d2b09e3 100644
--- a/pkg/services/ngalert/schedule/schedule.go
+++ b/pkg/services/ngalert/schedule/schedule.go
@@ -2,10 +2,8 @@ package schedule
import (
"context"
- "errors"
"fmt"
"net/url"
- "sync"
"time"
"github.com/grafana/grafana/pkg/bus"
@@ -17,10 +15,9 @@ import (
"github.com/grafana/grafana/pkg/services/ngalert/eval"
"github.com/grafana/grafana/pkg/services/ngalert/metrics"
ngmodels "github.com/grafana/grafana/pkg/services/ngalert/models"
- "github.com/grafana/grafana/pkg/services/ngalert/notifier"
- "github.com/grafana/grafana/pkg/services/ngalert/sender"
"github.com/grafana/grafana/pkg/services/ngalert/state"
"github.com/grafana/grafana/pkg/services/ngalert/store"
+ "github.com/grafana/grafana/pkg/setting"
"github.com/benbjohnson/clock"
"golang.org/x/sync/errgroup"
@@ -33,14 +30,6 @@ type ScheduleService interface {
// Run the scheduler until the context is canceled or the scheduler returns
// an error. The scheduler is terminated when this function returns.
Run(context.Context) error
-
- // AlertmanagersFor returns all the discovered Alertmanager URLs for the
- // organization.
- AlertmanagersFor(orgID int64) []*url.URL
-
- // DroppedAlertmanagersFor returns all the dropped Alertmanager URLs for the
- // organization.
- DroppedAlertmanagersFor(orgID int64) []*url.URL
// UpdateAlertRule notifies scheduler that a rule has been changed
UpdateAlertRule(key ngmodels.AlertRuleKey)
// UpdateAlertRulesByNamespaceUID notifies scheduler that all rules in a namespace should be updated.
@@ -55,6 +44,12 @@ type ScheduleService interface {
folderUpdateHandler(ctx context.Context, evt *events.FolderUpdated) error
}
+//go:generate mockery --name AlertsSender --structname AlertsSenderMock --inpackage --filename alerts_sender_mock.go --with-expecter
+// AlertsSender is an interface for a service that is responsible for sending notifications to the end-user.
+type AlertsSender interface {
+ Send(key ngmodels.AlertRuleKey, alerts definitions.PostableAlerts)
+}
+
type schedule struct {
// base tick rate (fastest possible configured check)
baseInterval time.Duration
@@ -82,26 +77,19 @@ type schedule struct {
evaluator eval.Evaluator
- ruleStore store.RuleStore
- instanceStore store.InstanceStore
- adminConfigStore store.AdminConfigurationStore
- orgStore store.OrgStore
+ ruleStore store.RuleStore
+ instanceStore store.InstanceStore
stateManager *state.Manager
- appURL *url.URL
+ appURL *url.URL
+ disableGrafanaFolder bool
- multiOrgNotifier *notifier.MultiOrgAlertmanager
- metrics *metrics.Scheduler
+ metrics *metrics.Scheduler
- // Senders help us send alerts to external Alertmanagers.
- adminConfigMtx sync.RWMutex
- sendAlertsTo map[int64]ngmodels.AlertmanagersChoice
- sendersCfgHash map[int64]string
- senders map[int64]*sender.Sender
- adminConfigPollInterval time.Duration
- disabledOrgs map[int64]struct{}
- minRuleInterval time.Duration
+ alertsSender AlertsSender
+ disabledOrgs map[int64]struct{}
+ minRuleInterval time.Duration
// schedulableAlertRules contains the alert rules that are considered for
// evaluation in the current tick. The evaluation of an alert rule in the
@@ -115,54 +103,43 @@ type schedule struct {
// SchedulerCfg is the scheduler configuration.
type SchedulerCfg struct {
- C clock.Clock
- BaseInterval time.Duration
- Logger log.Logger
- EvalAppliedFunc func(ngmodels.AlertRuleKey, time.Time)
- MaxAttempts int64
- StopAppliedFunc func(ngmodels.AlertRuleKey)
- Evaluator eval.Evaluator
- RuleStore store.RuleStore
- OrgStore store.OrgStore
- InstanceStore store.InstanceStore
- AdminConfigStore store.AdminConfigurationStore
- MultiOrgNotifier *notifier.MultiOrgAlertmanager
- Metrics *metrics.Scheduler
- AdminConfigPollInterval time.Duration
- DisabledOrgs map[int64]struct{}
- MinRuleInterval time.Duration
+ Cfg setting.UnifiedAlertingSettings
+ C clock.Clock
+ Logger log.Logger
+ EvalAppliedFunc func(ngmodels.AlertRuleKey, time.Time)
+ StopAppliedFunc func(ngmodels.AlertRuleKey)
+ Evaluator eval.Evaluator
+ RuleStore store.RuleStore
+ InstanceStore store.InstanceStore
+ Metrics *metrics.Scheduler
+ AlertSender AlertsSender
}
// NewScheduler returns a new schedule.
func NewScheduler(cfg SchedulerCfg, appURL *url.URL, stateManager *state.Manager, bus bus.Bus) *schedule {
- ticker := alerting.NewTicker(cfg.C, cfg.BaseInterval, cfg.Metrics.Ticker)
+ ticker := alerting.NewTicker(cfg.C, cfg.Cfg.BaseInterval, cfg.Metrics.Ticker)
sch := schedule{
- registry: alertRuleInfoRegistry{alertRuleInfo: make(map[ngmodels.AlertRuleKey]*alertRuleInfo)},
- maxAttempts: cfg.MaxAttempts,
- clock: cfg.C,
- baseInterval: cfg.BaseInterval,
- log: cfg.Logger,
- ticker: ticker,
- evalAppliedFunc: cfg.EvalAppliedFunc,
- stopAppliedFunc: cfg.StopAppliedFunc,
- evaluator: cfg.Evaluator,
- ruleStore: cfg.RuleStore,
- instanceStore: cfg.InstanceStore,
- orgStore: cfg.OrgStore,
- adminConfigStore: cfg.AdminConfigStore,
- multiOrgNotifier: cfg.MultiOrgNotifier,
- metrics: cfg.Metrics,
- appURL: appURL,
- stateManager: stateManager,
- sendAlertsTo: map[int64]ngmodels.AlertmanagersChoice{},
- senders: map[int64]*sender.Sender{},
- sendersCfgHash: map[int64]string{},
- adminConfigPollInterval: cfg.AdminConfigPollInterval,
- disabledOrgs: cfg.DisabledOrgs,
- minRuleInterval: cfg.MinRuleInterval,
- schedulableAlertRules: schedulableAlertRulesRegistry{rules: make(map[ngmodels.AlertRuleKey]*ngmodels.SchedulableAlertRule)},
- bus: bus,
+ registry: alertRuleInfoRegistry{alertRuleInfo: make(map[ngmodels.AlertRuleKey]*alertRuleInfo)},
+ maxAttempts: cfg.Cfg.MaxAttempts,
+ clock: cfg.C,
+ baseInterval: cfg.Cfg.BaseInterval,
+ log: cfg.Logger,
+ ticker: ticker,
+ evalAppliedFunc: cfg.EvalAppliedFunc,
+ stopAppliedFunc: cfg.StopAppliedFunc,
+ evaluator: cfg.Evaluator,
+ ruleStore: cfg.RuleStore,
+ instanceStore: cfg.InstanceStore,
+ metrics: cfg.Metrics,
+ appURL: appURL,
+ disableGrafanaFolder: cfg.Cfg.ReservedLabels.IsReservedLabelDisabled(ngmodels.FolderTitleLabel),
+ stateManager: stateManager,
+ disabledOrgs: cfg.Cfg.DisabledOrgs,
+ minRuleInterval: cfg.Cfg.MinInterval,
+ schedulableAlertRules: schedulableAlertRulesRegistry{rules: make(map[ngmodels.AlertRuleKey]*ngmodels.SchedulableAlertRule)},
+ bus: bus,
+ alertsSender: cfg.AlertSender,
}
bus.AddEventListener(sch.folderUpdateHandler)
@@ -171,158 +148,14 @@ func NewScheduler(cfg SchedulerCfg, appURL *url.URL, stateManager *state.Manager
}
func (sch *schedule) Run(ctx context.Context) error {
- var wg sync.WaitGroup
- wg.Add(2)
-
defer sch.ticker.Stop()
- go func() {
- defer wg.Done()
- if err := sch.schedulePeriodic(ctx); err != nil {
- sch.log.Error("failure while running the rule evaluation loop", "err", err)
- }
- }()
-
- go func() {
- defer wg.Done()
- if err := sch.adminConfigSync(ctx); err != nil {
- sch.log.Error("failure while running the admin configuration sync", "err", err)
- }
- }()
-
- wg.Wait()
- return nil
-}
-
-// SyncAndApplyConfigFromDatabase looks for the admin configuration in the database
-// and adjusts the sender(s) and alert handling mechanism accordingly.
-func (sch *schedule) SyncAndApplyConfigFromDatabase() error {
- sch.log.Debug("start of admin configuration sync")
- cfgs, err := sch.adminConfigStore.GetAdminConfigurations()
- if err != nil {
- return err
+ if err := sch.schedulePeriodic(ctx); err != nil {
+ sch.log.Error("failure while running the rule evaluation loop", "err", err)
}
-
- sch.log.Debug("found admin configurations", "count", len(cfgs))
-
- orgsFound := make(map[int64]struct{}, len(cfgs))
- sch.adminConfigMtx.Lock()
- for _, cfg := range cfgs {
- _, isDisabledOrg := sch.disabledOrgs[cfg.OrgID]
- if isDisabledOrg {
- sch.log.Debug("skipping starting sender for disabled org", "org", cfg.OrgID)
- continue
- }
-
- // Update the Alertmanagers choice for the organization.
- sch.sendAlertsTo[cfg.OrgID] = cfg.SendAlertsTo
-
- orgsFound[cfg.OrgID] = struct{}{} // keep track of the which senders we need to keep.
-
- existing, ok := sch.senders[cfg.OrgID]
-
- // We have no running sender and no Alertmanager(s) configured, no-op.
- if !ok && len(cfg.Alertmanagers) == 0 {
- sch.log.Debug("no external alertmanagers configured", "org", cfg.OrgID)
- continue
- }
- // We have no running sender and alerts are handled internally, no-op.
- if !ok && cfg.SendAlertsTo == ngmodels.InternalAlertmanager {
- sch.log.Debug("alerts are handled internally", "org", cfg.OrgID)
- continue
- }
-
- // We have a running sender but no Alertmanager(s) configured, shut it down.
- if ok && len(cfg.Alertmanagers) == 0 {
- sch.log.Debug("no external alertmanager(s) configured, sender will be stopped", "org", cfg.OrgID)
- delete(orgsFound, cfg.OrgID)
- continue
- }
-
- // We have a running sender, check if we need to apply a new config.
- if ok {
- if sch.sendersCfgHash[cfg.OrgID] == cfg.AsSHA256() {
- sch.log.Debug("sender configuration is the same as the one running, no-op", "org", cfg.OrgID, "alertmanagers", cfg.Alertmanagers)
- continue
- }
-
- sch.log.Debug("applying new configuration to sender", "org", cfg.OrgID, "alertmanagers", cfg.Alertmanagers)
- err := existing.ApplyConfig(cfg)
- if err != nil {
- sch.log.Error("failed to apply configuration", "err", err, "org", cfg.OrgID)
- continue
- }
- sch.sendersCfgHash[cfg.OrgID] = cfg.AsSHA256()
- continue
- }
-
- // No sender and have Alertmanager(s) to send to - start a new one.
- sch.log.Info("creating new sender for the external alertmanagers", "org", cfg.OrgID, "alertmanagers", cfg.Alertmanagers)
- s, err := sender.New(sch.metrics)
- if err != nil {
- sch.log.Error("unable to start the sender", "err", err, "org", cfg.OrgID)
- continue
- }
-
- sch.senders[cfg.OrgID] = s
- s.Run()
-
- err = s.ApplyConfig(cfg)
- if err != nil {
- sch.log.Error("failed to apply configuration", "err", err, "org", cfg.OrgID)
- continue
- }
-
- sch.sendersCfgHash[cfg.OrgID] = cfg.AsSHA256()
- }
-
- sendersToStop := map[int64]*sender.Sender{}
-
- for orgID, s := range sch.senders {
- if _, exists := orgsFound[orgID]; !exists {
- sendersToStop[orgID] = s
- delete(sch.senders, orgID)
- delete(sch.sendersCfgHash, orgID)
- }
- }
- sch.adminConfigMtx.Unlock()
-
- // We can now stop these senders w/o having to hold a lock.
- for orgID, s := range sendersToStop {
- sch.log.Info("stopping sender", "org", orgID)
- s.Stop()
- sch.log.Info("stopped sender", "org", orgID)
- }
-
- sch.log.Debug("finish of admin configuration sync")
-
return nil
}
-// AlertmanagersFor returns all the discovered Alertmanager(s) for a particular organization.
-func (sch *schedule) AlertmanagersFor(orgID int64) []*url.URL {
- sch.adminConfigMtx.RLock()
- defer sch.adminConfigMtx.RUnlock()
- s, ok := sch.senders[orgID]
- if !ok {
- return []*url.URL{}
- }
-
- return s.Alertmanagers()
-}
-
-// DroppedAlertmanagersFor returns all the dropped Alertmanager(s) for a particular organization.
-func (sch *schedule) DroppedAlertmanagersFor(orgID int64) []*url.URL {
- sch.adminConfigMtx.RLock()
- defer sch.adminConfigMtx.RUnlock()
- s, ok := sch.senders[orgID]
- if !ok {
- return []*url.URL{}
- }
-
- return s.DroppedAlertmanagers()
-}
-
// UpdateAlertRule looks for the active rule evaluation and commands it to update the rule
func (sch *schedule) UpdateAlertRule(key ngmodels.AlertRuleKey) {
ruleInfo, err := sch.registry.get(key)
@@ -376,27 +209,6 @@ func (sch *schedule) DeleteAlertRule(key ngmodels.AlertRuleKey) {
sch.metrics.SchedulableAlertRulesHash.Set(float64(hashUIDs(alertRules)))
}
-func (sch *schedule) adminConfigSync(ctx context.Context) error {
- for {
- select {
- case <-time.After(sch.adminConfigPollInterval):
- if err := sch.SyncAndApplyConfigFromDatabase(); err != nil {
- sch.log.Error("unable to sync admin configuration", "err", err)
- }
- case <-ctx.Done():
- // Stop sending alerts to all external Alertmanager(s).
- sch.adminConfigMtx.Lock()
- for orgID, s := range sch.senders {
- delete(sch.senders, orgID) // delete before we stop to make sure we don't accept any more alerts.
- s.Stop()
- }
- sch.adminConfigMtx.Unlock()
-
- return nil
- }
- }
-}
-
func (sch *schedule) schedulePeriodic(ctx context.Context) error {
dispatcherGroup, ctx := errgroup.WithContext(ctx)
for {
@@ -522,7 +334,6 @@ func (sch *schedule) schedulePeriodic(ctx context.Context) error {
}
}
-//nolint: gocyclo
func (sch *schedule) ruleRoutine(grafanaCtx context.Context, key ngmodels.AlertRuleKey, evalCh <-chan *evaluation, updateCh <-chan struct{}) error {
logger := sch.log.New("uid", key.UID, "org", key.OrgID)
logger.Debug("alert rule routine started")
@@ -532,55 +343,11 @@ func (sch *schedule) ruleRoutine(grafanaCtx context.Context, key ngmodels.AlertR
evalDuration := sch.metrics.EvalDuration.WithLabelValues(orgID)
evalTotalFailures := sch.metrics.EvalFailures.WithLabelValues(orgID)
- notify := func(alerts definitions.PostableAlerts, logger log.Logger) {
- if len(alerts.PostableAlerts) == 0 {
- logger.Debug("no alerts to put in the notifier or to send to external Alertmanager(s)")
- return
- }
-
- // Send alerts to local notifier if they need to be handled internally
- // or if no external AMs have been discovered yet.
- var localNotifierExist, externalNotifierExist bool
- if sch.sendAlertsTo[key.OrgID] == ngmodels.ExternalAlertmanagers && len(sch.AlertmanagersFor(key.OrgID)) > 0 {
- logger.Debug("no alerts to put in the notifier")
- } else {
- logger.Debug("sending alerts to local notifier", "count", len(alerts.PostableAlerts), "alerts", alerts.PostableAlerts)
- n, err := sch.multiOrgNotifier.AlertmanagerFor(key.OrgID)
- if err == nil {
- localNotifierExist = true
- if err := n.PutAlerts(alerts); err != nil {
- logger.Error("failed to put alerts in the local notifier", "count", len(alerts.PostableAlerts), "err", err)
- }
- } else {
- if errors.Is(err, notifier.ErrNoAlertmanagerForOrg) {
- logger.Debug("local notifier was not found")
- } else {
- logger.Error("local notifier is not available", "err", err)
- }
- }
- }
-
- // Send alerts to external Alertmanager(s) if we have a sender for this organization
- // and alerts are not being handled just internally.
- sch.adminConfigMtx.RLock()
- defer sch.adminConfigMtx.RUnlock()
- s, ok := sch.senders[key.OrgID]
- if ok && sch.sendAlertsTo[key.OrgID] != ngmodels.InternalAlertmanager {
- logger.Debug("sending alerts to external notifier", "count", len(alerts.PostableAlerts), "alerts", alerts.PostableAlerts)
- s.SendAlerts(alerts)
- externalNotifierExist = true
- }
-
- if !localNotifierExist && !externalNotifierExist {
- logger.Error("no external or internal notifier - alerts not delivered!", "count", len(alerts.PostableAlerts))
- }
- }
-
clearState := func() {
states := sch.stateManager.GetStatesForRuleUID(key.OrgID, key.UID)
expiredAlerts := FromAlertsStateToStoppedAlert(states, sch.appURL, sch.clock)
sch.stateManager.RemoveByRuleUID(key.OrgID, key.UID)
- notify(expiredAlerts, logger)
+ sch.alertsSender.Send(key, expiredAlerts)
}
updateRule := func(ctx context.Context, oldRule *ngmodels.AlertRule) (*ngmodels.AlertRule, error) {
@@ -600,49 +367,43 @@ func (sch *schedule) ruleRoutine(grafanaCtx context.Context, key ngmodels.AlertR
OrgId: key.OrgID,
}
- folder, err := sch.ruleStore.GetNamespaceByUID(ctx, q.Result.NamespaceUID, q.Result.OrgID, user)
- if err != nil {
- logger.Error("failed to fetch alert rule namespace", "err", err)
- return nil, err
- }
+ if !sch.disableGrafanaFolder {
+ folder, err := sch.ruleStore.GetNamespaceByUID(ctx, q.Result.NamespaceUID, q.Result.OrgID, user)
+ if err != nil {
+ logger.Error("failed to fetch alert rule namespace", "err", err)
+ return nil, err
+ }
- if q.Result.Labels == nil {
- q.Result.Labels = make(map[string]string)
- } else if val, ok := q.Result.Labels[ngmodels.FolderTitleLabel]; ok {
- logger.Warn("alert rule contains protected label, value will be overwritten", "label", ngmodels.FolderTitleLabel, "value", val)
+ if q.Result.Labels == nil {
+ q.Result.Labels = make(map[string]string)
+ } else if val, ok := q.Result.Labels[ngmodels.FolderTitleLabel]; ok {
+ logger.Warn("alert rule contains protected label, value will be overwritten", "label", ngmodels.FolderTitleLabel, "value", val)
+ }
+ q.Result.Labels[ngmodels.FolderTitleLabel] = folder.Title
}
- q.Result.Labels[ngmodels.FolderTitleLabel] = folder.Title
return q.Result, nil
}
- evaluate := func(ctx context.Context, r *ngmodels.AlertRule, attempt int64, e *evaluation) error {
+ evaluate := func(ctx context.Context, r *ngmodels.AlertRule, attempt int64, e *evaluation) {
logger := logger.New("version", r.Version, "attempt", attempt, "now", e.scheduledAt)
start := sch.clock.Now()
- condition := ngmodels.Condition{
- Condition: r.Condition,
- OrgID: r.OrgID,
- Data: r.Data,
- }
- results, err := sch.evaluator.ConditionEval(&condition, e.scheduledAt)
+ results := sch.evaluator.ConditionEval(ctx, r.GetEvalCondition(), e.scheduledAt)
dur := sch.clock.Now().Sub(start)
evalTotal.Inc()
evalDuration.Observe(dur.Seconds())
- if err != nil {
+ if results.HasErrors() {
evalTotalFailures.Inc()
- // consider saving alert instance on error
- logger.Error("failed to evaluate alert rule", "duration", dur, "err", err)
- return err
+ logger.Error("failed to evaluate alert rule", "results", results, "duration", dur)
+ } else {
+ logger.Debug("alert rule evaluated", "results", results, "duration", dur)
}
- logger.Debug("alert rule evaluated", "results", results, "duration", dur)
processedStates := sch.stateManager.ProcessEvalResults(ctx, e.scheduledAt, r, results)
sch.saveAlertStates(ctx, processedStates)
alerts := FromAlertStateToPostableAlerts(processedStates, sch.stateManager, sch.appURL)
-
- notify(alerts, logger)
- return nil
+ sch.alertsSender.Send(key, alerts)
}
retryIfError := func(f func(attempt int64) error) error {
@@ -704,7 +465,8 @@ func (sch *schedule) ruleRoutine(grafanaCtx context.Context, key ngmodels.AlertR
currentRule = newRule
logger.Debug("new alert rule version fetched", "title", newRule.Title, "version", newRule.Version)
}
- return evaluate(grafanaCtx, currentRule, attempt, ctx)
+ evaluate(grafanaCtx, currentRule, attempt, ctx)
+ return nil
})
if err != nil {
logger.Error("evaluation failed after all retries", "err", err)
@@ -740,15 +502,18 @@ func (sch *schedule) saveAlertStates(ctx context.Context, states []*state.State)
// folderUpdateHandler listens for folder update events and updates all rules in the given folder.
func (sch *schedule) folderUpdateHandler(ctx context.Context, evt *events.FolderUpdated) error {
+ if sch.disableGrafanaFolder {
+ return nil
+ }
return sch.UpdateAlertRulesByNamespaceUID(ctx, evt.OrgID, evt.UID)
}
// overrideCfg is only used on tests.
func (sch *schedule) overrideCfg(cfg SchedulerCfg) {
sch.clock = cfg.C
- sch.baseInterval = cfg.BaseInterval
+ sch.baseInterval = cfg.Cfg.BaseInterval
sch.ticker.Stop()
- sch.ticker = alerting.NewTicker(cfg.C, cfg.BaseInterval, cfg.Metrics.Ticker)
+ sch.ticker = alerting.NewTicker(cfg.C, cfg.Cfg.BaseInterval, cfg.Metrics.Ticker)
sch.evalAppliedFunc = cfg.EvalAppliedFunc
sch.stopAppliedFunc = cfg.StopAppliedFunc
}
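Net effect of this refactor: admin-configuration polling, sender lifecycle, and the internal-versus-external routing decision all leave the scheduler (they resurface as sender.AlertsRouter in the unit tests below), and the scheduler's only delivery seam is the single-method AlertsSender interface. Any implementation can stand in; a logging no-op, for instance, is enough for local experiments (sketch; loggingSender and its assumed log/ngmodels/definitions imports are illustrative):

    // loggingSender satisfies schedule.AlertsSender without delivering anything.
    type loggingSender struct{ log log.Logger }

    func (s loggingSender) Send(key ngmodels.AlertRuleKey, alerts definitions.PostableAlerts) {
        s.log.Info("suppressing delivery", "org", key.OrgID, "rule", key.UID, "count", len(alerts.PostableAlerts))
    }
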
diff --git a/pkg/services/ngalert/schedule/schedule_mock.go b/pkg/services/ngalert/schedule/schedule_mock.go
index 5a45811c36f..ef3b1ccafb3 100644
--- a/pkg/services/ngalert/schedule/schedule_mock.go
+++ b/pkg/services/ngalert/schedule/schedule_mock.go
@@ -1,4 +1,4 @@
-// Code generated by mockery v2.10.2. DO NOT EDIT.
+// Code generated by mockery v2.10.0. DO NOT EDIT.
package schedule
@@ -11,8 +11,6 @@ import (
models "github.com/grafana/grafana/pkg/services/ngalert/models"
time "time"
-
- url "net/url"
)
// FakeScheduleService is an autogenerated mock type for the ScheduleService type
@@ -20,43 +18,11 @@ type FakeScheduleService struct {
mock.Mock
}
-// AlertmanagersFor provides a mock function with given fields: orgID
-func (_m *FakeScheduleService) AlertmanagersFor(orgID int64) []*url.URL {
- ret := _m.Called(orgID)
-
- var r0 []*url.URL
- if rf, ok := ret.Get(0).(func(int64) []*url.URL); ok {
- r0 = rf(orgID)
- } else {
- if ret.Get(0) != nil {
- r0 = ret.Get(0).([]*url.URL)
- }
- }
-
- return r0
-}
-
// DeleteAlertRule provides a mock function with given fields: key
func (_m *FakeScheduleService) DeleteAlertRule(key models.AlertRuleKey) {
_m.Called(key)
}
-// DroppedAlertmanagersFor provides a mock function with given fields: orgID
-func (_m *FakeScheduleService) DroppedAlertmanagersFor(orgID int64) []*url.URL {
- ret := _m.Called(orgID)
-
- var r0 []*url.URL
- if rf, ok := ret.Get(0).(func(int64) []*url.URL); ok {
- r0 = rf(orgID)
- } else {
- if ret.Get(0) != nil {
- r0 = ret.Get(0).([]*url.URL)
- }
- }
-
- return r0
-}
-
// Run provides a mock function with given fields: _a0
func (_m *FakeScheduleService) Run(_a0 context.Context) error {
ret := _m.Called(_a0)
diff --git a/pkg/services/ngalert/schedule/schedule_test.go b/pkg/services/ngalert/schedule/schedule_test.go
index 23842932326..75f5a3e5e1f 100644
--- a/pkg/services/ngalert/schedule/schedule_test.go
+++ b/pkg/services/ngalert/schedule/schedule_test.go
@@ -15,6 +15,7 @@ import (
"github.com/grafana/grafana-plugin-sdk-go/data"
"github.com/prometheus/client_golang/prometheus"
"github.com/stretchr/testify/assert"
+ "github.com/stretchr/testify/mock"
"github.com/stretchr/testify/require"
busmock "github.com/grafana/grafana/pkg/bus/mock"
@@ -27,6 +28,7 @@ import (
"github.com/grafana/grafana/pkg/services/ngalert/schedule"
"github.com/grafana/grafana/pkg/services/ngalert/state"
"github.com/grafana/grafana/pkg/services/ngalert/tests"
+ "github.com/grafana/grafana/pkg/setting"
)
var testMetrics = metrics.NewNGAlert(prometheus.NewPedanticRegistry())
@@ -98,16 +100,19 @@ func TestWarmStateCache(t *testing.T) {
}
_ = dbstore.SaveAlertInstance(ctx, saveCmd2)
- schedCfg := schedule.SchedulerCfg{
- C: clock.NewMock(),
- BaseInterval: time.Second,
- Logger: log.New("ngalert cache warming test"),
-
- RuleStore: dbstore,
- InstanceStore: dbstore,
- Metrics: testMetrics.GetSchedulerMetrics(),
+ cfg := setting.UnifiedAlertingSettings{
+ BaseInterval: time.Second,
AdminConfigPollInterval: 10 * time.Minute, // do not poll in unit tests.
}
+
+ schedCfg := schedule.SchedulerCfg{
+ Cfg: cfg,
+ C: clock.NewMock(),
+ Logger: log.New("ngalert cache warming test"),
+ RuleStore: dbstore,
+ InstanceStore: dbstore,
+ Metrics: testMetrics.GetSchedulerMetrics(),
+ }
st := state.NewManager(schedCfg.Logger, testMetrics.GetStateMetrics(), nil, dbstore, dbstore, &dashboards.FakeDashboardService{}, &image.NoopImageService{}, clock.NewMock())
st.Warm(ctx)
@@ -140,25 +145,32 @@ func TestAlertingTicker(t *testing.T) {
stopAppliedCh := make(chan models.AlertRuleKey, len(alerts))
mockedClock := clock.NewMock()
- baseInterval := time.Second
+
+ cfg := setting.UnifiedAlertingSettings{
+ BaseInterval: time.Second,
+ AdminConfigPollInterval: 10 * time.Minute, // do not poll in unit tests.
+ DisabledOrgs: map[int64]struct{}{
+ disabledOrgID: {},
+ },
+ }
+
+ notifier := &schedule.AlertsSenderMock{}
+ notifier.EXPECT().Send(mock.Anything, mock.Anything).Return()
schedCfg := schedule.SchedulerCfg{
- C: mockedClock,
- BaseInterval: baseInterval,
+ Cfg: cfg,
+ C: mockedClock,
EvalAppliedFunc: func(alertDefKey models.AlertRuleKey, now time.Time) {
evalAppliedCh <- evalAppliedInfo{alertDefKey: alertDefKey, now: now}
},
StopAppliedFunc: func(alertDefKey models.AlertRuleKey) {
stopAppliedCh <- alertDefKey
},
- RuleStore: dbstore,
- InstanceStore: dbstore,
- Logger: log.New("ngalert schedule test"),
- Metrics: testMetrics.GetSchedulerMetrics(),
- AdminConfigPollInterval: 10 * time.Minute, // do not poll in unit tests.
- DisabledOrgs: map[int64]struct{}{
- disabledOrgID: {},
- },
+ RuleStore: dbstore,
+ InstanceStore: dbstore,
+ Logger: log.New("ngalert schedule test"),
+ Metrics: testMetrics.GetSchedulerMetrics(),
+ AlertSender: notifier,
}
st := state.NewManager(schedCfg.Logger, testMetrics.GetStateMetrics(), nil, dbstore, dbstore, &dashboards.FakeDashboardService{}, &image.NoopImageService{}, clock.NewMock())
appUrl := &url.URL{
diff --git a/pkg/services/ngalert/schedule/schedule_unit_test.go b/pkg/services/ngalert/schedule/schedule_unit_test.go
index b7ba1d64799..4f51c12008e 100644
--- a/pkg/services/ngalert/schedule/schedule_unit_test.go
+++ b/pkg/services/ngalert/schedule/schedule_unit_test.go
@@ -53,19 +53,19 @@ func TestSendingToExternalAlertmanager(t *testing.T) {
cmd := store.UpdateAdminConfigurationCmd{AdminConfiguration: adminConfig}
require.NoError(t, fakeAdminConfigStore.UpdateAdminConfiguration(cmd))
- sched, mockedClock := setupScheduler(t, fakeRuleStore, fakeInstanceStore, fakeAdminConfigStore, nil)
+ sched, mockedClock, alertsRouter := setupScheduler(t, fakeRuleStore, fakeInstanceStore, fakeAdminConfigStore, nil)
// Make sure we sync the configuration at least once before the evaluation happens to guarantee the sender is running
// when the first alert triggers.
- require.NoError(t, sched.SyncAndApplyConfigFromDatabase())
- sched.adminConfigMtx.Lock()
- require.Equal(t, 1, len(sched.senders))
- require.Equal(t, 1, len(sched.sendersCfgHash))
- sched.adminConfigMtx.Unlock()
+ require.NoError(t, alertsRouter.SyncAndApplyConfigFromDatabase())
+ alertsRouter.AdminConfigMtx.Lock()
+ require.Equal(t, 1, len(alertsRouter.Senders))
+ require.Equal(t, 1, len(alertsRouter.SendersCfgHash))
+ alertsRouter.AdminConfigMtx.Unlock()
// Then, ensure we've discovered the Alertmanager.
require.Eventually(t, func() bool {
- return len(sched.AlertmanagersFor(1)) == 1 && len(sched.DroppedAlertmanagersFor(1)) == 0
+ return len(alertsRouter.AlertmanagersFor(1)) == 1 && len(alertsRouter.DroppedAlertmanagersFor(1)) == 0
}, 10*time.Second, 200*time.Millisecond)
ctx, cancel := context.WithCancel(context.Background())
@@ -91,15 +91,15 @@ func TestSendingToExternalAlertmanager(t *testing.T) {
require.NoError(t, fakeAdminConfigStore.UpdateAdminConfiguration(cmd))
// Again, make sure we sync and verify the senders.
- require.NoError(t, sched.SyncAndApplyConfigFromDatabase())
- sched.adminConfigMtx.Lock()
- require.Equal(t, 0, len(sched.senders))
- require.Equal(t, 0, len(sched.sendersCfgHash))
- sched.adminConfigMtx.Unlock()
+ require.NoError(t, alertsRouter.SyncAndApplyConfigFromDatabase())
+ alertsRouter.AdminConfigMtx.Lock()
+ require.Equal(t, 0, len(alertsRouter.Senders))
+ require.Equal(t, 0, len(alertsRouter.SendersCfgHash))
+ alertsRouter.AdminConfigMtx.Unlock()
// Then, ensure we've dropped the Alertmanager.
require.Eventually(t, func() bool {
- return len(sched.AlertmanagersFor(1)) == 0 && len(sched.DroppedAlertmanagersFor(1)) == 0
+ return len(alertsRouter.AlertmanagersFor(1)) == 0 && len(alertsRouter.DroppedAlertmanagersFor(1)) == 0
}, 10*time.Second, 200*time.Millisecond)
}
@@ -115,19 +115,19 @@ func TestSendingToExternalAlertmanager_WithMultipleOrgs(t *testing.T) {
cmd := store.UpdateAdminConfigurationCmd{AdminConfiguration: adminConfig}
require.NoError(t, fakeAdminConfigStore.UpdateAdminConfiguration(cmd))
- sched, mockedClock := setupScheduler(t, fakeRuleStore, fakeInstanceStore, fakeAdminConfigStore, nil)
+ sched, mockedClock, alertsRouter := setupScheduler(t, fakeRuleStore, fakeInstanceStore, fakeAdminConfigStore, nil)
// Make sure we sync the configuration at least once before the evaluation happens to guarantee the sender is running
// when the first alert triggers.
- require.NoError(t, sched.SyncAndApplyConfigFromDatabase())
- sched.adminConfigMtx.Lock()
- require.Equal(t, 1, len(sched.senders))
- require.Equal(t, 1, len(sched.sendersCfgHash))
- sched.adminConfigMtx.Unlock()
+ require.NoError(t, alertsRouter.SyncAndApplyConfigFromDatabase())
+ alertsRouter.AdminConfigMtx.Lock()
+ require.Equal(t, 1, len(alertsRouter.Senders))
+ require.Equal(t, 1, len(alertsRouter.SendersCfgHash))
+ alertsRouter.AdminConfigMtx.Unlock()
// Then, ensure we've discovered the Alertmanager.
require.Eventuallyf(t, func() bool {
- return len(sched.AlertmanagersFor(1)) == 1 && len(sched.DroppedAlertmanagersFor(1)) == 0
+ return len(alertsRouter.AlertmanagersFor(1)) == 1 && len(alertsRouter.DroppedAlertmanagersFor(1)) == 0
}, 10*time.Second, 200*time.Millisecond, "Alertmanager for org 1 was never discovered")
ctx, cancel := context.WithCancel(context.Background())
@@ -145,15 +145,15 @@ func TestSendingToExternalAlertmanager_WithMultipleOrgs(t *testing.T) {
require.NoError(t, fakeAdminConfigStore.UpdateAdminConfiguration(cmd))
// If we sync again, new senders must have spawned.
- require.NoError(t, sched.SyncAndApplyConfigFromDatabase())
- sched.adminConfigMtx.Lock()
- require.Equal(t, 2, len(sched.senders))
- require.Equal(t, 2, len(sched.sendersCfgHash))
- sched.adminConfigMtx.Unlock()
+ require.NoError(t, alertsRouter.SyncAndApplyConfigFromDatabase())
+ alertsRouter.AdminConfigMtx.Lock()
+ require.Equal(t, 2, len(alertsRouter.Senders))
+ require.Equal(t, 2, len(alertsRouter.SendersCfgHash))
+ alertsRouter.AdminConfigMtx.Unlock()
// Then, ensure we've discovered the Alertmanager for the new organization.
require.Eventuallyf(t, func() bool {
- return len(sched.AlertmanagersFor(2)) == 1 && len(sched.DroppedAlertmanagersFor(2)) == 0
+ return len(alertsRouter.AlertmanagersFor(2)) == 1 && len(alertsRouter.DroppedAlertmanagersFor(2)) == 0
}, 10*time.Second, 200*time.Millisecond, "Alertmanager for org 2 was never discovered")
// With everything up and running, let's advance the time to make sure we get at least one alert iteration.
@@ -180,23 +180,23 @@ func TestSendingToExternalAlertmanager_WithMultipleOrgs(t *testing.T) {
require.NoError(t, fakeAdminConfigStore.UpdateAdminConfiguration(cmd))
// Before we sync, let's grab the existing hash of this particular org.
- sched.adminConfigMtx.Lock()
- currentHash := sched.sendersCfgHash[2]
- sched.adminConfigMtx.Unlock()
+ alertsRouter.AdminConfigMtx.Lock()
+ currentHash := alertsRouter.SendersCfgHash[2]
+ alertsRouter.AdminConfigMtx.Unlock()
// Now, sync again.
- require.NoError(t, sched.SyncAndApplyConfigFromDatabase())
+ require.NoError(t, alertsRouter.SyncAndApplyConfigFromDatabase())
// The hash for org two should not be the same and we should still have two senders.
- sched.adminConfigMtx.Lock()
- require.NotEqual(t, sched.sendersCfgHash[2], currentHash)
- require.Equal(t, 2, len(sched.senders))
- require.Equal(t, 2, len(sched.sendersCfgHash))
- sched.adminConfigMtx.Unlock()
+ alertsRouter.AdminConfigMtx.Lock()
+ require.NotEqual(t, alertsRouter.SendersCfgHash[2], currentHash)
+ require.Equal(t, 2, len(alertsRouter.Senders))
+ require.Equal(t, 2, len(alertsRouter.SendersCfgHash))
+ alertsRouter.AdminConfigMtx.Unlock()
// Wait for the discovery of the new Alertmanager for orgID = 2.
require.Eventuallyf(t, func() bool {
- return len(sched.AlertmanagersFor(2)) == 2 && len(sched.DroppedAlertmanagersFor(2)) == 0
+ return len(alertsRouter.AlertmanagersFor(2)) == 2 && len(alertsRouter.DroppedAlertmanagersFor(2)) == 0
}, 10*time.Second, 200*time.Millisecond, "Alertmanager for org 2 was never re-discovered after fix")
// 3. Now, let's provide a configuration that fails for OrgID = 1.
@@ -205,40 +205,40 @@ func TestSendingToExternalAlertmanager_WithMultipleOrgs(t *testing.T) {
require.NoError(t, fakeAdminConfigStore.UpdateAdminConfiguration(cmd))
// Before we sync, let's get the current config hash.
- sched.adminConfigMtx.Lock()
- currentHash = sched.sendersCfgHash[1]
- sched.adminConfigMtx.Unlock()
+ alertsRouter.AdminConfigMtx.Lock()
+ currentHash = alertsRouter.SendersCfgHash[1]
+ alertsRouter.AdminConfigMtx.Unlock()
// Now, sync again.
- require.NoError(t, sched.SyncAndApplyConfigFromDatabase())
+ require.NoError(t, alertsRouter.SyncAndApplyConfigFromDatabase())
// The old configuration should still be running.
- sched.adminConfigMtx.Lock()
- require.Equal(t, sched.sendersCfgHash[1], currentHash)
- sched.adminConfigMtx.Unlock()
- require.Equal(t, 1, len(sched.AlertmanagersFor(1)))
+ alertsRouter.AdminConfigMtx.Lock()
+ require.Equal(t, alertsRouter.SendersCfgHash[1], currentHash)
+ alertsRouter.AdminConfigMtx.Unlock()
+ require.Equal(t, 1, len(alertsRouter.AlertmanagersFor(1)))
// If we fix it - it should be applied.
adminConfig2 = &models.AdminConfiguration{OrgID: 1, Alertmanagers: []string{"notarealalertmanager:3030"}}
cmd = store.UpdateAdminConfigurationCmd{AdminConfiguration: adminConfig2}
require.NoError(t, fakeAdminConfigStore.UpdateAdminConfiguration(cmd))
- require.NoError(t, sched.SyncAndApplyConfigFromDatabase())
- sched.adminConfigMtx.Lock()
- require.NotEqual(t, sched.sendersCfgHash[1], currentHash)
- sched.adminConfigMtx.Unlock()
+ require.NoError(t, alertsRouter.SyncAndApplyConfigFromDatabase())
+ alertsRouter.AdminConfigMtx.Lock()
+ require.NotEqual(t, alertsRouter.SendersCfgHash[1], currentHash)
+ alertsRouter.AdminConfigMtx.Unlock()
// Finally, remove everything.
require.NoError(t, fakeAdminConfigStore.DeleteAdminConfiguration(1))
require.NoError(t, fakeAdminConfigStore.DeleteAdminConfiguration(2))
- require.NoError(t, sched.SyncAndApplyConfigFromDatabase())
- sched.adminConfigMtx.Lock()
- require.Equal(t, 0, len(sched.senders))
- require.Equal(t, 0, len(sched.sendersCfgHash))
- sched.adminConfigMtx.Unlock()
+ require.NoError(t, alertsRouter.SyncAndApplyConfigFromDatabase())
+ alertsRouter.AdminConfigMtx.Lock()
+ require.Equal(t, 0, len(alertsRouter.Senders))
+ require.Equal(t, 0, len(alertsRouter.SendersCfgHash))
+ alertsRouter.AdminConfigMtx.Unlock()
require.Eventuallyf(t, func() bool {
- NoAlertmanagerOrgOne := len(sched.AlertmanagersFor(1)) == 0 && len(sched.DroppedAlertmanagersFor(1)) == 0
- NoAlertmanagerOrgTwo := len(sched.AlertmanagersFor(2)) == 0 && len(sched.DroppedAlertmanagersFor(2)) == 0
+ NoAlertmanagerOrgOne := len(alertsRouter.AlertmanagersFor(1)) == 0 && len(alertsRouter.DroppedAlertmanagersFor(1)) == 0
+ NoAlertmanagerOrgTwo := len(alertsRouter.AlertmanagersFor(2)) == 0 && len(alertsRouter.DroppedAlertmanagersFor(2)) == 0
return NoAlertmanagerOrgOne && NoAlertmanagerOrgTwo
}, 10*time.Second, 200*time.Millisecond, "Alertmanager for org 1 and 2 were never removed")
@@ -260,21 +260,21 @@ func TestChangingAlertmanagersChoice(t *testing.T) {
cmd := store.UpdateAdminConfigurationCmd{AdminConfiguration: adminConfig}
require.NoError(t, fakeAdminConfigStore.UpdateAdminConfiguration(cmd))
- sched, mockedClock := setupScheduler(t, fakeRuleStore, fakeInstanceStore, fakeAdminConfigStore, nil)
+ sched, mockedClock, alertsRouter := setupScheduler(t, fakeRuleStore, fakeInstanceStore, fakeAdminConfigStore, nil)
// Make sure we sync the configuration at least once before the evaluation happens to guarantee the sender is running
// when the first alert triggers.
- require.NoError(t, sched.SyncAndApplyConfigFromDatabase())
- sched.adminConfigMtx.Lock()
- require.Equal(t, 1, len(sched.senders))
- require.Equal(t, 1, len(sched.sendersCfgHash))
- sched.adminConfigMtx.Unlock()
+ require.NoError(t, alertsRouter.SyncAndApplyConfigFromDatabase())
+ alertsRouter.AdminConfigMtx.Lock()
+ require.Equal(t, 1, len(alertsRouter.Senders))
+ require.Equal(t, 1, len(alertsRouter.SendersCfgHash))
+ alertsRouter.AdminConfigMtx.Unlock()
// Then, ensure we've discovered the Alertmanager and the Alertmanagers choice is correct.
require.Eventually(t, func() bool {
- return len(sched.AlertmanagersFor(1)) == 1 &&
- len(sched.DroppedAlertmanagersFor(1)) == 0 &&
- sched.sendAlertsTo[1] == adminConfig.SendAlertsTo
+ return len(alertsRouter.AlertmanagersFor(1)) == 1 &&
+ len(alertsRouter.DroppedAlertmanagersFor(1)) == 0 &&
+ alertsRouter.SendAlertsTo[1] == adminConfig.SendAlertsTo
}, 10*time.Second, 200*time.Millisecond)
ctx, cancel := context.WithCancel(context.Background())
@@ -290,10 +290,10 @@ func TestChangingAlertmanagersChoice(t *testing.T) {
mockedClock.Add(2 * time.Second)
// Eventually, our Alertmanager should have received alerts.
- require.Eventually(t, func() bool {
+ require.Eventuallyf(t, func() bool {
return fakeAM.AlertsCount() >= 1 &&
fakeAM.AlertNamesCompare([]string{alertRule.Title})
- }, 10*time.Second, 200*time.Millisecond)
+ }, 10*time.Second, 200*time.Millisecond, "expected at least one alert to be received and the title of the first one to be '%s'. but got [%d]: [%v]", alertRule.Title, fakeAM.AlertsCount(), fakeAM.Alerts())
// Now, let's change the Alertmanagers choice to send only to the external Alertmanager.
adminConfig.SendAlertsTo = models.ExternalAlertmanagers
@@ -301,17 +301,17 @@ func TestChangingAlertmanagersChoice(t *testing.T) {
require.NoError(t, fakeAdminConfigStore.UpdateAdminConfiguration(cmd))
// Again, make sure we sync and verify the senders.
- require.NoError(t, sched.SyncAndApplyConfigFromDatabase())
- sched.adminConfigMtx.Lock()
- require.Equal(t, 1, len(sched.senders))
- require.Equal(t, 1, len(sched.sendersCfgHash))
- sched.adminConfigMtx.Unlock()
+ require.NoError(t, alertsRouter.SyncAndApplyConfigFromDatabase())
+ alertsRouter.AdminConfigMtx.Lock()
+ require.Equal(t, 1, len(alertsRouter.Senders))
+ require.Equal(t, 1, len(alertsRouter.SendersCfgHash))
+ alertsRouter.AdminConfigMtx.Unlock()
// Then, ensure we still have the Alertmanager but the Alertmanagers choice has changed.
require.Eventually(t, func() bool {
- return len(sched.AlertmanagersFor(1)) == 1 &&
- len(sched.DroppedAlertmanagersFor(1)) == 0 &&
- sched.sendAlertsTo[1] == adminConfig.SendAlertsTo
+ return len(alertsRouter.AlertmanagersFor(1)) == 1 &&
+ len(alertsRouter.DroppedAlertmanagersFor(1)) == 0 &&
+ alertsRouter.SendAlertsTo[1] == adminConfig.SendAlertsTo
}, 10*time.Second, 200*time.Millisecond)
// Finally, let's change the Alertmanagers choice to send only to the internal Alertmanager.
@@ -321,34 +321,34 @@ func TestChangingAlertmanagersChoice(t *testing.T) {
// Again, make sure we sync and verify the senders.
// Senders should be running even though alerts are being handled externally.
- require.NoError(t, sched.SyncAndApplyConfigFromDatabase())
- sched.adminConfigMtx.Lock()
- require.Equal(t, 1, len(sched.senders))
- require.Equal(t, 1, len(sched.sendersCfgHash))
- sched.adminConfigMtx.Unlock()
+ require.NoError(t, alertsRouter.SyncAndApplyConfigFromDatabase())
+ alertsRouter.AdminConfigMtx.Lock()
+ require.Equal(t, 1, len(alertsRouter.Senders))
+ require.Equal(t, 1, len(alertsRouter.SendersCfgHash))
+ alertsRouter.AdminConfigMtx.Unlock()
// Then, ensure the Alertmanager is still listed and the Alertmanagers choice has changed.
require.Eventually(t, func() bool {
- return len(sched.AlertmanagersFor(1)) == 1 &&
- len(sched.DroppedAlertmanagersFor(1)) == 0 &&
- sched.sendAlertsTo[1] == adminConfig.SendAlertsTo
+ return len(alertsRouter.AlertmanagersFor(1)) == 1 &&
+ len(alertsRouter.DroppedAlertmanagersFor(1)) == 0 &&
+ alertsRouter.SendAlertsTo[1] == adminConfig.SendAlertsTo
}, 10*time.Second, 200*time.Millisecond)
}
func TestSchedule_ruleRoutine(t *testing.T) {
createSchedule := func(
evalAppliedChan chan time.Time,
- ) (*schedule, *store.FakeRuleStore, *store.FakeInstanceStore, *store.FakeAdminConfigStore, prometheus.Gatherer) {
+ ) (*schedule, *store.FakeRuleStore, *store.FakeInstanceStore, *store.FakeAdminConfigStore, prometheus.Gatherer, *sender.AlertsRouter) {
ruleStore := store.NewFakeRuleStore(t)
instanceStore := &store.FakeInstanceStore{}
adminConfigStore := store.NewFakeAdminConfigStore(t)
registry := prometheus.NewPedanticRegistry()
- sch, _ := setupScheduler(t, ruleStore, instanceStore, adminConfigStore, registry)
+ sch, _, alertsRouter := setupScheduler(t, ruleStore, instanceStore, adminConfigStore, registry)
sch.evalAppliedFunc = func(key models.AlertRuleKey, t time.Time) {
evalAppliedChan <- t
}
- return sch, ruleStore, instanceStore, adminConfigStore, registry
+ return sch, ruleStore, instanceStore, adminConfigStore, registry, alertsRouter
}
// normal states do not include NoData and Error because currently it is not possible to perform any sensible test
@@ -364,7 +364,7 @@ func TestSchedule_ruleRoutine(t *testing.T) {
t.Run(fmt.Sprintf("when rule evaluation happens (evaluation state %s)", evalState), func(t *testing.T) {
evalChan := make(chan *evaluation)
evalAppliedChan := make(chan time.Time)
- sch, ruleStore, instanceStore, _, reg := createSchedule(evalAppliedChan)
+ sch, ruleStore, instanceStore, _, reg, _ := createSchedule(evalAppliedChan)
rule := CreateTestAlertRule(t, ruleStore, 10, rand.Int63(), evalState)
@@ -491,7 +491,7 @@ func TestSchedule_ruleRoutine(t *testing.T) {
t.Run("should exit", func(t *testing.T) {
t.Run("when context is cancelled", func(t *testing.T) {
stoppedChan := make(chan error)
- sch, _, _, _, _ := createSchedule(make(chan time.Time))
+ sch, _, _, _, _, _ := createSchedule(make(chan time.Time))
ctx, cancel := context.WithCancel(context.Background())
go func() {
@@ -510,7 +510,7 @@ func TestSchedule_ruleRoutine(t *testing.T) {
evalAppliedChan := make(chan time.Time)
ctx := context.Background()
- sch, ruleStore, _, _, _ := createSchedule(evalAppliedChan)
+ sch, ruleStore, _, _, _, _ := createSchedule(evalAppliedChan)
rule := CreateTestAlertRule(t, ruleStore, 10, rand.Int63(), randomNormalState())
@@ -562,7 +562,7 @@ func TestSchedule_ruleRoutine(t *testing.T) {
evalChan := make(chan *evaluation)
evalAppliedChan := make(chan time.Time)
- sch, ruleStore, _, _, _ := createSchedule(evalAppliedChan)
+ sch, ruleStore, _, _, _, _ := createSchedule(evalAppliedChan)
rule := CreateTestAlertRule(t, ruleStore, 10, rand.Int63(), randomNormalState())
@@ -614,7 +614,7 @@ func TestSchedule_ruleRoutine(t *testing.T) {
evalAppliedChan := make(chan time.Time)
updateChan := make(chan struct{})
- sch, ruleStore, _, _, _ := createSchedule(evalAppliedChan)
+ sch, ruleStore, _, _, _, _ := createSchedule(evalAppliedChan)
rule := CreateTestAlertRule(t, ruleStore, 10, rand.Int63(), eval.Alerting) // we want the alert to fire
@@ -657,7 +657,7 @@ func TestSchedule_ruleRoutine(t *testing.T) {
evalAppliedChan := make(chan time.Time)
updateChan := make(chan struct{})
- sch, ruleStore, _, _, _ := createSchedule(evalAppliedChan)
+ sch, ruleStore, _, _, _, _ := createSchedule(evalAppliedChan)
sch.maxAttempts = rand.Int63n(4) + 1
rule := CreateTestAlertRule(t, ruleStore, 10, rand.Int63(), randomNormalState())
@@ -693,7 +693,7 @@ func TestSchedule_ruleRoutine(t *testing.T) {
defer fakeAM.Close()
orgID := rand.Int63()
- s, err := sender.New(nil)
+ s, err := sender.New()
require.NoError(t, err)
adminConfig := &models.AdminConfiguration{OrgID: orgID, Alertmanagers: []string{fakeAM.Server.URL}}
err = s.ApplyConfig(adminConfig)
@@ -710,8 +710,8 @@ func TestSchedule_ruleRoutine(t *testing.T) {
updateChan := make(chan struct{})
ctx := context.Background()
- sch, ruleStore, _, _, _ := createSchedule(evalAppliedChan)
- sch.senders[orgID] = s
+ sch, ruleStore, _, _, _, alertsRouter := createSchedule(evalAppliedChan)
+ alertsRouter.Senders[orgID] = s
var rulePtr = CreateTestAlertRule(t, ruleStore, 10, orgID, eval.Alerting) // we want the alert to fire
var rule = *rulePtr
@@ -733,8 +733,15 @@ func TestSchedule_ruleRoutine(t *testing.T) {
}
sch.stateManager.Put(states)
states = sch.stateManager.GetStatesForRuleUID(rule.OrgID, rule.UID)
- expectedToBeSent := FromAlertsStateToStoppedAlert(states, sch.appURL, sch.clock)
- require.NotEmptyf(t, expectedToBeSent.PostableAlerts, "State manger was expected to return at least one state that can be expired")
+
+ expectedToBeSent := 0
+ for _, s := range states {
+ if s.State == eval.Normal || s.State == eval.Pending {
+ continue
+ }
+ expectedToBeSent++
+ }
+ require.Greaterf(t, expectedToBeSent, 0, "State manager was expected to return at least one state that can be expired")
go func() {
ctx, cancel := context.WithCancel(context.Background())
@@ -769,8 +776,8 @@ func TestSchedule_ruleRoutine(t *testing.T) {
var count int
require.Eventuallyf(t, func() bool {
count = fakeAM.AlertsCount()
- return count == len(expectedToBeSent.PostableAlerts)
- }, 20*time.Second, 200*time.Millisecond, "Alertmanager was expected to receive %d alerts, but received only %d", len(expectedToBeSent.PostableAlerts), count)
+ return count == expectedToBeSent
+ }, 20*time.Second, 200*time.Millisecond, "Alertmanager was expected to receive %d alerts, but received only %d", expectedToBeSent, count)
for _, alert := range fakeAM.Alerts() {
require.Equalf(t, sch.clock.Now().UTC(), time.Time(alert.EndsAt).UTC(), "Alert received by Alertmanager should be expired as of now")
@@ -799,7 +806,7 @@ func TestSchedule_ruleRoutine(t *testing.T) {
defer fakeAM.Close()
orgID := rand.Int63()
- s, err := sender.New(nil)
+ s, err := sender.New()
require.NoError(t, err)
adminConfig := &models.AdminConfiguration{OrgID: orgID, Alertmanagers: []string{fakeAM.Server.URL}}
err = s.ApplyConfig(adminConfig)
@@ -814,8 +821,8 @@ func TestSchedule_ruleRoutine(t *testing.T) {
evalChan := make(chan *evaluation)
evalAppliedChan := make(chan time.Time)
- sch, ruleStore, _, _, _ := createSchedule(evalAppliedChan)
- sch.senders[orgID] = s
+ sch, ruleStore, _, _, _, alertsRouter := createSchedule(evalAppliedChan)
+ alertsRouter.Senders[orgID] = s
// eval.Alerting makes state manager to create notifications for alertmanagers
rule := CreateTestAlertRule(t, ruleStore, 10, orgID, eval.Alerting)
@@ -925,11 +932,11 @@ func setupSchedulerWithFakeStores(t *testing.T) *schedule {
ruleStore := store.NewFakeRuleStore(t)
instanceStore := &store.FakeInstanceStore{}
adminConfigStore := store.NewFakeAdminConfigStore(t)
- sch, _ := setupScheduler(t, ruleStore, instanceStore, adminConfigStore, nil)
+ sch, _, _ := setupScheduler(t, ruleStore, instanceStore, adminConfigStore, nil)
return sch
}
-func setupScheduler(t *testing.T, rs store.RuleStore, is store.InstanceStore, acs store.AdminConfigurationStore, registry *prometheus.Registry) (*schedule, *clock.Mock) {
+func setupScheduler(t *testing.T, rs store.RuleStore, is store.InstanceStore, acs store.AdminConfigurationStore, registry *prometheus.Registry) (*schedule, *clock.Mock, *sender.AlertsRouter) {
t.Helper()
fakeAnnoRepo := store.NewFakeAnnotationsRepo()
@@ -945,25 +952,31 @@ func setupScheduler(t *testing.T, rs store.RuleStore, is store.InstanceStore, ac
moa, err := notifier.NewMultiOrgAlertmanager(&setting.Cfg{}, ¬ifier.FakeConfigStore{}, ¬ifier.FakeOrgStore{}, ¬ifier.FakeKVStore{}, provisioning.NewFakeProvisioningStore(), decryptFn, m.GetMultiOrgAlertmanagerMetrics(), nil, log.New("testlogger"), secretsService)
require.NoError(t, err)
- schedCfg := SchedulerCfg{
- C: mockedClock,
+ appUrl := &url.URL{
+ Scheme: "http",
+ Host: "localhost",
+ }
+
+ alertsRouter := sender.NewAlertsRouter(moa, acs, mockedClock, appUrl, map[int64]struct{}{}, 10*time.Minute) // do not poll in unit tests.
+
+ cfg := setting.UnifiedAlertingSettings{
BaseInterval: time.Second,
MaxAttempts: 1,
- Evaluator: eval.NewEvaluator(&setting.Cfg{ExpressionsEnabled: true}, logger, nil, secretsService, expr.ProvideService(&setting.Cfg{ExpressionsEnabled: true}, nil, nil)),
- RuleStore: rs,
- InstanceStore: is,
- AdminConfigStore: acs,
- MultiOrgNotifier: moa,
- Logger: logger,
- Metrics: m.GetSchedulerMetrics(),
AdminConfigPollInterval: 10 * time.Minute, // do not poll in unit tests.
}
- st := state.NewManager(schedCfg.Logger, m.GetStateMetrics(), nil, rs, is, &dashboards.FakeDashboardService{}, &image.NoopImageService{}, clock.NewMock())
- appUrl := &url.URL{
- Scheme: "http",
- Host: "localhost",
+
+ schedCfg := SchedulerCfg{
+ Cfg: cfg,
+ C: mockedClock,
+ Evaluator: eval.NewEvaluator(&setting.Cfg{ExpressionsEnabled: true}, logger, nil, secretsService, expr.ProvideService(&setting.Cfg{ExpressionsEnabled: true}, nil, nil)),
+ RuleStore: rs,
+ InstanceStore: is,
+ Logger: logger,
+ Metrics: m.GetSchedulerMetrics(),
+ AlertSender: alertsRouter,
}
- return NewScheduler(schedCfg, appUrl, st, busmock.New()), mockedClock
+ st := state.NewManager(schedCfg.Logger, m.GetStateMetrics(), nil, rs, is, &dashboards.FakeDashboardService{}, &image.NoopImageService{}, clock.NewMock())
+ return NewScheduler(schedCfg, appUrl, st, busmock.New()), mockedClock, alertsRouter
}
// createTestAlertRule creates a dummy alert definition to be used by the tests.
diff --git a/pkg/services/ngalert/sender/router.go b/pkg/services/ngalert/sender/router.go
new file mode 100644
index 00000000000..decd20d1a6d
--- /dev/null
+++ b/pkg/services/ngalert/sender/router.go
@@ -0,0 +1,255 @@
+package sender
+
+import (
+ "context"
+ "errors"
+ "net/url"
+ "sync"
+ "time"
+
+ "github.com/benbjohnson/clock"
+
+ "github.com/grafana/grafana/pkg/infra/log"
+ "github.com/grafana/grafana/pkg/services/ngalert/api/tooling/definitions"
+ "github.com/grafana/grafana/pkg/services/ngalert/models"
+ "github.com/grafana/grafana/pkg/services/ngalert/notifier"
+ "github.com/grafana/grafana/pkg/services/ngalert/store"
+)
+
+// AlertsRouter handles alerts generated during alert rule evaluation.
+// Based on rule's orgID and the configuration for that organization,
+// it determines whether an alert needs to be sent to an external Alertmanager and/or the internal notifier.Alertmanager.
+//
+// After creating an AlertsRouter, you must call Run to keep the AlertsRouter's
+// state synchronized with the alerting configuration.
+type AlertsRouter struct {
+ logger log.Logger
+ clock clock.Clock
+ adminConfigStore store.AdminConfigurationStore
+
+ // Senders help us send alerts to external Alertmanagers.
+ AdminConfigMtx sync.RWMutex
+ SendAlertsTo map[int64]models.AlertmanagersChoice
+ Senders map[int64]*Sender
+ SendersCfgHash map[int64]string
+
+ MultiOrgNotifier *notifier.MultiOrgAlertmanager
+
+ appURL *url.URL
+ disabledOrgs map[int64]struct{}
+ adminConfigPollInterval time.Duration
+}
+
+func NewAlertsRouter(multiOrgNotifier *notifier.MultiOrgAlertmanager, store store.AdminConfigurationStore, clk clock.Clock, appURL *url.URL, disabledOrgs map[int64]struct{}, configPollInterval time.Duration) *AlertsRouter {
+ d := &AlertsRouter{
+ logger: log.New("alerts-router"),
+ clock: clk,
+ adminConfigStore: store,
+
+ AdminConfigMtx: sync.RWMutex{},
+ Senders: map[int64]*Sender{},
+ SendersCfgHash: map[int64]string{},
+ SendAlertsTo: map[int64]models.AlertmanagersChoice{},
+
+ MultiOrgNotifier: multiOrgNotifier,
+
+ appURL: appURL,
+ disabledOrgs: disabledOrgs,
+ adminConfigPollInterval: configPollInterval,
+ }
+ return d
+}
+
+// SyncAndApplyConfigFromDatabase looks for the admin configuration in the database
+// and adjusts the sender(s) and alert handling mechanism accordingly.
+func (d *AlertsRouter) SyncAndApplyConfigFromDatabase() error {
+ d.logger.Debug("start of admin configuration sync")
+ cfgs, err := d.adminConfigStore.GetAdminConfigurations()
+ if err != nil {
+ return err
+ }
+
+ d.logger.Debug("found admin configurations", "count", len(cfgs))
+
+ orgsFound := make(map[int64]struct{}, len(cfgs))
+ d.AdminConfigMtx.Lock()
+ for _, cfg := range cfgs {
+ _, isDisabledOrg := d.disabledOrgs[cfg.OrgID]
+ if isDisabledOrg {
+ d.logger.Debug("skipping starting sender for disabled org", "org", cfg.OrgID)
+ continue
+ }
+
+ // Update the Alertmanagers choice for the organization.
+ d.SendAlertsTo[cfg.OrgID] = cfg.SendAlertsTo
+
+ orgsFound[cfg.OrgID] = struct{}{} // keep track of which senders we need to keep.
+
+ existing, ok := d.Senders[cfg.OrgID]
+
+ // We have no running sender and no Alertmanager(s) configured, no-op.
+ if !ok && len(cfg.Alertmanagers) == 0 {
+ d.logger.Debug("no external alertmanagers configured", "org", cfg.OrgID)
+ continue
+ }
+ // We have no running sender and alerts are handled internally, no-op.
+ if !ok && cfg.SendAlertsTo == models.InternalAlertmanager {
+ d.logger.Debug("alerts are handled internally", "org", cfg.OrgID)
+ continue
+ }
+
+ // We have a running sender but no Alertmanager(s) configured, shut it down.
+ if ok && len(cfg.Alertmanagers) == 0 {
+ d.logger.Debug("no external alertmanager(s) configured, sender will be stopped", "org", cfg.OrgID)
+ delete(orgsFound, cfg.OrgID)
+ continue
+ }
+
+ // We have a running sender, check if we need to apply a new config.
+ if ok {
+ if d.SendersCfgHash[cfg.OrgID] == cfg.AsSHA256() {
+ d.logger.Debug("sender configuration is the same as the one running, no-op", "org", cfg.OrgID, "alertmanagers", cfg.Alertmanagers)
+ continue
+ }
+
+ d.logger.Debug("applying new configuration to sender", "org", cfg.OrgID, "alertmanagers", cfg.Alertmanagers)
+ err := existing.ApplyConfig(cfg)
+ if err != nil {
+ d.logger.Error("failed to apply configuration", "err", err, "org", cfg.OrgID)
+ continue
+ }
+ d.SendersCfgHash[cfg.OrgID] = cfg.AsSHA256()
+ continue
+ }
+
+ // No sender is running but we have Alertmanager(s) to send to - start a new one.
+ d.logger.Info("creating new sender for the external alertmanagers", "org", cfg.OrgID, "alertmanagers", cfg.Alertmanagers)
+ s, err := New()
+ if err != nil {
+ d.logger.Error("unable to start the sender", "err", err, "org", cfg.OrgID)
+ continue
+ }
+
+ d.Senders[cfg.OrgID] = s
+ s.Run()
+
+ err = s.ApplyConfig(cfg)
+ if err != nil {
+ d.logger.Error("failed to apply configuration", "err", err, "org", cfg.OrgID)
+ continue
+ }
+
+ d.SendersCfgHash[cfg.OrgID] = cfg.AsSHA256()
+ }
+
+ sendersToStop := map[int64]*Sender{}
+
+ for orgID, s := range d.Senders {
+ if _, exists := orgsFound[orgID]; !exists {
+ sendersToStop[orgID] = s
+ delete(d.Senders, orgID)
+ delete(d.SendersCfgHash, orgID)
+ }
+ }
+ d.AdminConfigMtx.Unlock()
+
+ // We can now stop these senders w/o having to hold a lock.
+ for orgID, s := range sendersToStop {
+ d.logger.Info("stopping sender", "org", orgID)
+ s.Stop()
+ d.logger.Info("stopped sender", "org", orgID)
+ }
+
+ d.logger.Debug("finish of admin configuration sync")
+
+ return nil
+}
+
+func (d *AlertsRouter) Send(key models.AlertRuleKey, alerts definitions.PostableAlerts) {
+ logger := d.logger.New("rule_uid", key.UID, "org", key.OrgID)
+ if len(alerts.PostableAlerts) == 0 {
+ logger.Debug("no alerts to notify about")
+ return
+ }
+ // Send alerts to local notifier if they need to be handled internally
+ // or if no external AMs have been discovered yet.
+ var localNotifierExist, externalNotifierExist bool
+ if d.SendAlertsTo[key.OrgID] == models.ExternalAlertmanagers && len(d.AlertmanagersFor(key.OrgID)) > 0 {
+ logger.Debug("no alerts to put in the notifier")
+ } else {
+ logger.Debug("sending alerts to local notifier", "count", len(alerts.PostableAlerts), "alerts", alerts.PostableAlerts)
+ n, err := d.MultiOrgNotifier.AlertmanagerFor(key.OrgID)
+ if err == nil {
+ localNotifierExist = true
+ if err := n.PutAlerts(alerts); err != nil {
+ logger.Error("failed to put alerts in the local notifier", "count", len(alerts.PostableAlerts), "err", err)
+ }
+ } else {
+ if errors.Is(err, notifier.ErrNoAlertmanagerForOrg) {
+ logger.Debug("local notifier was not found")
+ } else {
+ logger.Error("local notifier is not available", "err", err)
+ }
+ }
+ }
+
+ // Send alerts to external Alertmanager(s) if we have a sender for this organization
+ // and alerts are not being handled just internally.
+ d.AdminConfigMtx.RLock()
+ defer d.AdminConfigMtx.RUnlock()
+ s, ok := d.Senders[key.OrgID]
+ if ok && d.SendAlertsTo[key.OrgID] != models.InternalAlertmanager {
+ logger.Debug("sending alerts to external notifier", "count", len(alerts.PostableAlerts), "alerts", alerts.PostableAlerts)
+ s.SendAlerts(alerts)
+ externalNotifierExist = true
+ }
+
+ if !localNotifierExist && !externalNotifierExist {
+ logger.Error("no external or internal notifier - [%d] alerts not delivered", len(alerts.PostableAlerts))
+ }
+}
+
+// AlertmanagersFor returns all the discovered Alertmanager(s) for a particular organization.
+func (d *AlertsRouter) AlertmanagersFor(orgID int64) []*url.URL {
+ d.AdminConfigMtx.RLock()
+ defer d.AdminConfigMtx.RUnlock()
+ s, ok := d.Senders[orgID]
+ if !ok {
+ return []*url.URL{}
+ }
+ return s.Alertmanagers()
+}
+
+// DroppedAlertmanagersFor returns all the dropped Alertmanager(s) for a particular organization.
+func (d *AlertsRouter) DroppedAlertmanagersFor(orgID int64) []*url.URL {
+ d.AdminConfigMtx.RLock()
+ defer d.AdminConfigMtx.RUnlock()
+ s, ok := d.Senders[orgID]
+ if !ok {
+ return []*url.URL{}
+ }
+
+ return s.DroppedAlertmanagers()
+}
+
+// Run starts regular updates of the configuration.
+func (d *AlertsRouter) Run(ctx context.Context) error {
+ for {
+ select {
+ case <-time.After(d.adminConfigPollInterval):
+ if err := d.SyncAndApplyConfigFromDatabase(); err != nil {
+ d.logger.Error("unable to sync admin configuration", "err", err)
+ }
+ case <-ctx.Done():
+ // Stop sending alerts to all external Alertmanager(s).
+ d.AdminConfigMtx.Lock()
+ for orgID, s := range d.Senders {
+ delete(d.Senders, orgID) // delete before we stop to make sure we don't accept any more alerts.
+ s.Stop()
+ }
+ d.AdminConfigMtx.Unlock()
+
+ return nil
+ }
+ }
+}
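For orientation, here is a minimal wiring sketch for the new AlertsRouter, using only the constructor and methods defined above. The `moa`/`acs` arguments, the app URL, and the poll interval are placeholders, not Grafana's actual dependency wiring:

```go
package main

import (
	"context"
	"net/url"
	"time"

	"github.com/benbjohnson/clock"

	"github.com/grafana/grafana/pkg/services/ngalert/notifier"
	"github.com/grafana/grafana/pkg/services/ngalert/sender"
	"github.com/grafana/grafana/pkg/services/ngalert/store"
)

func runRouter(ctx context.Context, moa *notifier.MultiOrgAlertmanager, acs store.AdminConfigurationStore) error {
	appURL, err := url.Parse("http://localhost:3000") // placeholder app URL
	if err != nil {
		return err
	}
	// No disabled orgs; re-sync the admin configuration every minute.
	router := sender.NewAlertsRouter(moa, acs, clock.New(), appURL,
		map[int64]struct{}{}, 1*time.Minute)

	// Prime the router once, then keep it in sync in the background
	// until the context is cancelled.
	if err := router.SyncAndApplyConfigFromDatabase(); err != nil {
		return err
	}
	return router.Run(ctx)
}
```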
diff --git a/pkg/services/ngalert/sender/sender.go b/pkg/services/ngalert/sender/sender.go
index 444fe2744fe..d15479e77b4 100644
--- a/pkg/services/ngalert/sender/sender.go
+++ b/pkg/services/ngalert/sender/sender.go
@@ -9,7 +9,6 @@ import (
"github.com/grafana/grafana/pkg/infra/log"
apimodels "github.com/grafana/grafana/pkg/services/ngalert/api/tooling/definitions"
- "github.com/grafana/grafana/pkg/services/ngalert/metrics"
ngmodels "github.com/grafana/grafana/pkg/services/ngalert/models"
"github.com/prometheus/alertmanager/api/v2/models"
@@ -38,7 +37,7 @@ type Sender struct {
sdManager *discovery.Manager
}
-func New(_ *metrics.Scheduler) (*Sender, error) {
+func New() (*Sender, error) {
l := log.New("sender")
sdCtx, sdCancel := context.WithCancel(context.Background())
s := &Sender{
diff --git a/pkg/services/ngalert/state/manager.go b/pkg/services/ngalert/state/manager.go
index 740ec44f762..34e2d106814 100644
--- a/pkg/services/ngalert/state/manager.go
+++ b/pkg/services/ngalert/state/manager.go
@@ -342,7 +342,7 @@ func (st *Manager) annotateState(ctx context.Context, alertRule *ngModels.AlertR
panelId, err := strconv.ParseInt(panelUid, 10, 64)
if err != nil {
- st.log.Error("error parsing panelUID for alert annotation", "panelUID", panelUid, "alertRuleUID", alertRule.UID, "error", err.Error())
+ st.log.Error("error parsing panelUID for alert annotation", "panelUID", panelUid, "alertRuleUID", alertRule.UID, "err", err.Error())
return
}
diff --git a/pkg/services/ngalert/state/manager_test.go b/pkg/services/ngalert/state/manager_test.go
index b8b9238e23c..6051a4e7540 100644
--- a/pkg/services/ngalert/state/manager_test.go
+++ b/pkg/services/ngalert/state/manager_test.go
@@ -1655,6 +1655,103 @@ func TestProcessEvalResults(t *testing.T) {
},
},
},
+ {
+ desc: "normal -> alerting -> error when result is Error and ExecErrorState is Error",
+ alertRule: &models.AlertRule{
+ OrgID: 1,
+ Title: "test_title",
+ UID: "test_alert_rule_uid_2",
+ NamespaceUID: "test_namespace_uid",
+ Annotations: map[string]string{"annotation": "test"},
+ Labels: map[string]string{"label": "test"},
+ IntervalSeconds: 10,
+ For: 20 * time.Second,
+ ExecErrState: models.ErrorErrState,
+ },
+ evalResults: []eval.Results{
+ {
+ eval.Result{
+ Instance: data.Labels{"instance_label": "test"},
+ State: eval.Alerting,
+ EvaluatedAt: evaluationTime,
+ EvaluationDuration: evaluationDuration,
+ },
+ },
+ {
+ eval.Result{
+ Instance: data.Labels{"instance_label": "test"},
+ State: eval.Alerting,
+ EvaluatedAt: evaluationTime.Add(10 * time.Second),
+ EvaluationDuration: evaluationDuration,
+ },
+ },
+ {
+ eval.Result{
+ Instance: data.Labels{"instance_label": "test"},
+ State: eval.Alerting,
+ EvaluatedAt: evaluationTime.Add(20 * time.Second),
+ EvaluationDuration: evaluationDuration,
+ },
+ },
+ {
+ eval.Result{
+ Instance: data.Labels{"instance_label": "test"},
+ State: eval.Error,
+ EvaluatedAt: evaluationTime.Add(30 * time.Second),
+ EvaluationDuration: evaluationDuration,
+ },
+ },
+ {
+ eval.Result{
+ Instance: data.Labels{"instance_label": "test"},
+ State: eval.Error,
+ EvaluatedAt: evaluationTime.Add(40 * time.Second),
+ EvaluationDuration: evaluationDuration,
+ },
+ },
+ {
+ eval.Result{
+ Instance: data.Labels{"instance_label": "test"},
+ State: eval.Error,
+ EvaluatedAt: evaluationTime.Add(50 * time.Second),
+ EvaluationDuration: evaluationDuration,
+ },
+ },
+ },
+ expectedAnnotations: 3,
+ expectedStates: map[string]*state.State{
+ `[["__alert_rule_namespace_uid__","test_namespace_uid"],["__alert_rule_uid__","test_alert_rule_uid_2"],["alertname","test_title"],["instance_label","test"],["label","test"]]`: {
+ AlertRuleUID: "test_alert_rule_uid_2",
+ OrgID: 1,
+ CacheId: `[["__alert_rule_namespace_uid__","test_namespace_uid"],["__alert_rule_uid__","test_alert_rule_uid_2"],["alertname","test_title"],["instance_label","test"],["label","test"]]`,
+ Labels: data.Labels{
+ "__alert_rule_namespace_uid__": "test_namespace_uid",
+ "__alert_rule_uid__": "test_alert_rule_uid_2",
+ "alertname": "test_title",
+ "label": "test",
+ "instance_label": "test",
+ },
+ State: eval.Error,
+ Results: []state.Evaluation{
+ {
+ EvaluationTime: evaluationTime.Add(40 * time.Second),
+ EvaluationState: eval.Error,
+ Values: make(map[string]*float64),
+ },
+ {
+ EvaluationTime: evaluationTime.Add(50 * time.Second),
+ EvaluationState: eval.Error,
+ Values: make(map[string]*float64),
+ },
+ },
+ StartsAt: evaluationTime.Add(20 * time.Second),
+ EndsAt: evaluationTime.Add(50 * time.Second).Add(state.ResendDelay * 3),
+ LastEvaluationTime: evaluationTime.Add(50 * time.Second),
+ EvaluationDuration: evaluationDuration,
+ Annotations: map[string]string{"annotation": "test"},
+ },
+ },
+ },
{
desc: "normal -> alerting -> error -> alerting - it should clear the error",
alertRule: &models.AlertRule{
diff --git a/pkg/services/ngalert/state/state.go b/pkg/services/ngalert/state/state.go
index 7aa8c20a66a..d8bf35c1fc7 100644
--- a/pkg/services/ngalert/state/state.go
+++ b/pkg/services/ngalert/state/state.go
@@ -122,6 +122,12 @@ func (a *State) resultError(alertRule *models.AlertRule, result eval.Result) {
switch a.State {
case eval.Alerting, eval.Error:
+ // We must set the state here as the state can change both from Alerting
+ // to Error and from Error to Alerting. This can happen when the datasource
+ // is unavailable or queries against the datasource return errors, and is
+ // then resolved as soon as the datasource is available and queries return
+ // without error.
+ a.State = execErrState
a.setEndsAt(alertRule, result)
case eval.Pending:
if result.EvaluatedAt.Sub(a.StartsAt) >= alertRule.For {
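To make the fix concrete, here is a small standalone sketch (simplified stand-in types, not the real state.Manager API) of the transition rule the added assignment implements:

```go
package state

// ruleState is a stand-in for eval.State in this illustration.
type ruleState string

const (
	stateAlerting ruleState = "Alerting"
	stateError    ruleState = "Error"
)

// applyErrorResult mirrors the fixed branch in resultError: when the rule is
// already firing (Alerting or Error), a new Error result moves the state to
// the rule's configured ExecErrState instead of leaving the previous value
// in place, so flips between Alerting and Error are reflected immediately.
func applyErrorResult(current, execErrState ruleState) ruleState {
	switch current {
	case stateAlerting, stateError:
		return execErrState
	default:
		return current
	}
}
```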
diff --git a/pkg/services/query/query.go b/pkg/services/query/query.go
index 4f7a478c476..2ddb7dec1b9 100644
--- a/pkg/services/query/query.go
+++ b/pkg/services/query/query.go
@@ -151,7 +151,7 @@ func (s *Service) handleQueryData(ctx context.Context, user *models.SignedInUser
instanceSettings, err := adapters.ModelToInstanceSettings(ds, s.decryptSecureJsonDataFn(ctx))
if err != nil {
- return nil, fmt.Errorf("failed to convert data source to instance settings: %w", err)
+ return nil, err
}
req := &backend.QueryDataRequest{
@@ -343,12 +343,8 @@ func (s *Service) getDataSourceFromQuery(ctx context.Context, user *models.Signe
return nil, NewErrBadQuery("missing data source ID/UID")
}
-func (s *Service) decryptSecureJsonDataFn(ctx context.Context) func(ds *datasources.DataSource) map[string]string {
- return func(ds *datasources.DataSource) map[string]string {
- decryptedJsonData, err := s.dataSourceService.DecryptedValues(ctx, ds)
- if err != nil {
- s.log.Error("Failed to decrypt secure json data", "error", err)
- }
- return decryptedJsonData
+func (s *Service) decryptSecureJsonDataFn(ctx context.Context) func(ds *datasources.DataSource) (map[string]string, error) {
+ return func(ds *datasources.DataSource) (map[string]string, error) {
+ return s.dataSourceService.DecryptedValues(ctx, ds)
}
}
diff --git a/pkg/services/secrets/kvstore/migrations/migrator.go b/pkg/services/secrets/kvstore/migrations/migrator.go
new file mode 100644
index 00000000000..14c3c6f0b07
--- /dev/null
+++ b/pkg/services/secrets/kvstore/migrations/migrator.go
@@ -0,0 +1,51 @@
+package migrations
+
+import (
+ "context"
+ "reflect"
+ "time"
+
+ "github.com/grafana/grafana/pkg/infra/log"
+ "github.com/grafana/grafana/pkg/infra/serverlock"
+ datasources "github.com/grafana/grafana/pkg/services/datasources/service"
+)
+
+var logger = log.New("secret.migration")
+
+// SecretMigrationService is used to migrate legacy secrets to new unified secrets.
+type SecretMigrationService interface {
+ Migrate(ctx context.Context) error
+}
+
+type SecretMigrationServiceImpl struct {
+ Services []SecretMigrationService
+ ServerLockService *serverlock.ServerLockService
+}
+
+func ProvideSecretMigrationService(
+ serverLockService *serverlock.ServerLockService,
+ dataSourceSecretMigrationService *datasources.DataSourceSecretMigrationService,
+) *SecretMigrationServiceImpl {
+ return &SecretMigrationServiceImpl{
+ ServerLockService: serverLockService,
+ Services: []SecretMigrationService{
+ dataSourceSecretMigrationService,
+ },
+ }
+}
+
+// Migrate runs all migration services. This will block until all services have exited.
+func (s *SecretMigrationServiceImpl) Migrate(ctx context.Context) error {
+ // Start migration services.
+ return s.ServerLockService.LockAndExecute(ctx, "migrate secrets to unified secrets", time.Minute*10, func(context.Context) {
+ for _, service := range s.Services {
+ serviceName := reflect.TypeOf(service).String()
+ logger.Debug("Starting secret migration service", "service", serviceName)
+ err := service.Migrate(ctx)
+ if err != nil {
+ logger.Error("Stopped secret migration service", "service", serviceName, "reason", err)
+ }
+ logger.Debug("Finished secret migration service", "service", serviceName)
+ }
+ })
+}
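A hypothetical sketch of how a further migration could be plugged into this service; `pluginSecretMigrationService` is invented for illustration, and only the SecretMigrationService interface and the Services slice come from the code above:

```go
package migrations

import "context"

// pluginSecretMigrationService is a hypothetical additional migration.
type pluginSecretMigrationService struct{}

func (s *pluginSecretMigrationService) Migrate(ctx context.Context) error {
	// move plugin secrets into the unified secrets table here
	return nil
}

// withExtraMigration appends the extra service so Migrate runs it under the
// same server lock as the data source secret migration.
func withExtraMigration(impl *SecretMigrationServiceImpl) {
	impl.Services = append(impl.Services, &pluginSecretMigrationService{})
}
```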
diff --git a/pkg/services/sqlstore/datasource.go b/pkg/services/sqlstore/datasource.go
index 771c642d77e..bf66bb04cda 100644
--- a/pkg/services/sqlstore/datasource.go
+++ b/pkg/services/sqlstore/datasource.go
@@ -61,6 +61,13 @@ func (ss *SQLStore) GetDataSources(ctx context.Context, query *datasources.GetDa
})
}
+func (ss *SQLStore) GetAllDataSources(ctx context.Context, query *datasources.GetAllDataSourcesQuery) error {
+ return ss.WithDbSession(ctx, func(sess *DBSession) error {
+ query.Result = make([]*datasources.DataSource, 0)
+ return sess.Asc("name").Find(&query.Result)
+ })
+}
+
// GetDataSourcesByType returns all datasources for a given type or an error if the specified type is an empty string
func (ss *SQLStore) GetDataSourcesByType(ctx context.Context, query *datasources.GetDataSourcesByTypeQuery) error {
if query.Type == "" {
@@ -255,6 +262,9 @@ func (ss *SQLStore) UpdateDataSource(ctx context.Context, cmd *datasources.Updat
sess.MustCols("password")
sess.MustCols("basic_auth_password")
sess.MustCols("user")
+ // Make sure secure json data is zeroed out if empty. We do this as we want to migrate secrets from
+ // secure json data to the unified secrets table.
+ sess.MustCols("secure_json_data")
var updateSession *xorm.Session
if cmd.Version != 0 {
diff --git a/pkg/services/sqlstore/migrations/ualert/channel.go b/pkg/services/sqlstore/migrations/ualert/channel.go
index 27e0317c812..5f193037e55 100644
--- a/pkg/services/sqlstore/migrations/ualert/channel.go
+++ b/pkg/services/sqlstore/migrations/ualert/channel.go
@@ -7,8 +7,10 @@ import (
"fmt"
"github.com/prometheus/alertmanager/pkg/labels"
+ "github.com/prometheus/common/model"
"github.com/grafana/grafana/pkg/components/simplejson"
+ ngModels "github.com/grafana/grafana/pkg/services/ngalert/models"
"github.com/grafana/grafana/pkg/util"
)
@@ -229,8 +231,9 @@ func (m *migration) createDefaultRouteAndReceiver(defaultChannels []*notificatio
}
defaultRoute := &Route{
- Receiver: defaultReceiverName,
- Routes: make([]*Route, 0),
+ Receiver: defaultReceiverName,
+ Routes: make([]*Route, 0),
+ GroupByStr: []string{ngModels.FolderTitleLabel, model.AlertNameLabel}, // To keep parity with pre-migration notifications.
}
return defaultReceiver, defaultRoute, nil
@@ -438,10 +441,11 @@ type PostableApiAlertingConfig struct {
}
type Route struct {
- Receiver string `yaml:"receiver,omitempty" json:"receiver,omitempty"`
- Matchers Matchers `yaml:"matchers,omitempty" json:"matchers,omitempty"`
- Routes []*Route `yaml:"routes,omitempty" json:"routes,omitempty"`
- Continue bool `yaml:"continue,omitempty" json:"continue,omitempty"`
+ Receiver string `yaml:"receiver,omitempty" json:"receiver,omitempty"`
+ Matchers Matchers `yaml:"matchers,omitempty" json:"matchers,omitempty"`
+ Routes []*Route `yaml:"routes,omitempty" json:"routes,omitempty"`
+ Continue bool `yaml:"continue,omitempty" json:"continue,omitempty"`
+ GroupByStr []string `yaml:"group_by,omitempty" json:"group_by,omitempty"`
}
type Matchers labels.Matchers
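For reference, a sketch of the default route the migration now produces (the receiver name is taken from the tests below); with the omitempty tags, the empty Routes slice is dropped and it marshals to the same group_by JSON used elsewhere in this change:

```go
// defaultMigrationRoute shows the shape of the route created by
// createDefaultRouteAndReceiver after this change; marshalled to JSON it
// yields {"receiver":"autogen-contact-point-default","group_by":["grafana_folder","alertname"]}.
func defaultMigrationRoute() *Route {
	return &Route{
		Receiver:   "autogen-contact-point-default",
		Routes:     make([]*Route, 0),
		GroupByStr: []string{ngModels.FolderTitleLabel, model.AlertNameLabel},
	}
}
```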
diff --git a/pkg/services/sqlstore/migrations/ualert/channel_test.go b/pkg/services/sqlstore/migrations/ualert/channel_test.go
index 688b3d62eb1..93be8d65d46 100644
--- a/pkg/services/sqlstore/migrations/ualert/channel_test.go
+++ b/pkg/services/sqlstore/migrations/ualert/channel_test.go
@@ -3,9 +3,11 @@ package ualert
import (
"testing"
+ "github.com/prometheus/common/model"
"github.com/stretchr/testify/require"
"github.com/grafana/grafana/pkg/components/simplejson"
+ ngModels "github.com/grafana/grafana/pkg/services/ngalert/models"
)
func TestFilterReceiversForAlert(t *testing.T) {
@@ -144,10 +146,11 @@ func TestCreateRoute(t *testing.T) {
"recv1": struct{}{},
},
expected: &Route{
- Receiver: "recv1",
- Matchers: Matchers{{Type: 0, Name: "rule_uid", Value: "r_uid1"}},
- Routes: nil,
- Continue: false,
+ Receiver: "recv1",
+ Matchers: Matchers{{Type: 0, Name: "rule_uid", Value: "r_uid1"}},
+ Routes: nil,
+ Continue: false,
+ GroupByStr: nil,
},
},
{
@@ -162,19 +165,22 @@ func TestCreateRoute(t *testing.T) {
Matchers: Matchers{{Type: 0, Name: "rule_uid", Value: "r_uid1"}},
Routes: []*Route{
{
- Receiver: "recv1",
- Matchers: Matchers{{Type: 0, Name: "rule_uid", Value: "r_uid1"}},
- Routes: nil,
- Continue: true,
+ Receiver: "recv1",
+ Matchers: Matchers{{Type: 0, Name: "rule_uid", Value: "r_uid1"}},
+ Routes: nil,
+ Continue: true,
+ GroupByStr: nil,
},
{
- Receiver: "recv2",
- Matchers: Matchers{{Type: 0, Name: "rule_uid", Value: "r_uid1"}},
- Routes: nil,
- Continue: true,
+ Receiver: "recv2",
+ Matchers: Matchers{{Type: 0, Name: "rule_uid", Value: "r_uid1"}},
+ Routes: nil,
+ Continue: true,
+ GroupByStr: nil,
},
},
- Continue: false,
+ Continue: false,
+ GroupByStr: nil,
},
},
}
@@ -294,8 +300,9 @@ func TestCreateDefaultRouteAndReceiver(t *testing.T) {
GrafanaManagedReceivers: []*PostableGrafanaReceiver{{Name: "name1"}, {Name: "name2"}},
},
expRoute: &Route{
- Receiver: "autogen-contact-point-default",
- Routes: make([]*Route, 0),
+ Receiver: "autogen-contact-point-default",
+ Routes: make([]*Route, 0),
+ GroupByStr: []string{ngModels.FolderTitleLabel, model.AlertNameLabel},
},
},
{
@@ -306,8 +313,9 @@ func TestCreateDefaultRouteAndReceiver(t *testing.T) {
GrafanaManagedReceivers: []*PostableGrafanaReceiver{},
},
expRoute: &Route{
- Receiver: "autogen-contact-point-default",
- Routes: make([]*Route, 0),
+ Receiver: "autogen-contact-point-default",
+ Routes: make([]*Route, 0),
+ GroupByStr: []string{ngModels.FolderTitleLabel, model.AlertNameLabel},
},
},
{
@@ -315,8 +323,9 @@ func TestCreateDefaultRouteAndReceiver(t *testing.T) {
defaultChannels: []*notificationChannel{createNotChannel(t, "uid1", int64(1), "name1")},
expRecv: nil,
expRoute: &Route{
- Receiver: "name1",
- Routes: make([]*Route, 0),
+ Receiver: "name1",
+ Routes: make([]*Route, 0),
+ GroupByStr: []string{ngModels.FolderTitleLabel, model.AlertNameLabel},
},
},
}
diff --git a/pkg/services/sqlstore/migrations/ualert/migration_test.go b/pkg/services/sqlstore/migrations/ualert/migration_test.go
index 2736eaf3eba..d162fdd4993 100644
--- a/pkg/services/sqlstore/migrations/ualert/migration_test.go
+++ b/pkg/services/sqlstore/migrations/ualert/migration_test.go
@@ -9,12 +9,14 @@ import (
"github.com/google/go-cmp/cmp"
"github.com/google/go-cmp/cmp/cmpopts"
"github.com/prometheus/alertmanager/pkg/labels"
+ "github.com/prometheus/common/model"
"github.com/stretchr/testify/require"
"xorm.io/xorm"
"github.com/grafana/grafana/pkg/components/simplejson"
"github.com/grafana/grafana/pkg/models"
"github.com/grafana/grafana/pkg/services/datasources"
+ ngModels "github.com/grafana/grafana/pkg/services/ngalert/models"
"github.com/grafana/grafana/pkg/services/sqlstore/migrations"
"github.com/grafana/grafana/pkg/services/sqlstore/migrations/ualert"
"github.com/grafana/grafana/pkg/services/sqlstore/migrator"
@@ -156,7 +158,8 @@ func TestDashAlertMigration(t *testing.T) {
int64(1): {
AlertmanagerConfig: ualert.PostableApiAlertingConfig{
Route: &ualert.Route{
- Receiver: "autogen-contact-point-default",
+ Receiver: "autogen-contact-point-default",
+ GroupByStr: []string{ngModels.FolderTitleLabel, model.AlertNameLabel},
Routes: []*ualert.Route{
{Receiver: "notifier1", Matchers: createAlertNameMatchers("alert1")}, // These Matchers are temporary and will be replaced below with generated rule_uid.
{Matchers: createAlertNameMatchers("alert2"), Routes: []*ualert.Route{
@@ -177,7 +180,8 @@ func TestDashAlertMigration(t *testing.T) {
int64(2): {
AlertmanagerConfig: ualert.PostableApiAlertingConfig{
Route: &ualert.Route{
- Receiver: "notifier6",
+ Receiver: "notifier6",
+ GroupByStr: []string{ngModels.FolderTitleLabel, model.AlertNameLabel},
Routes: []*ualert.Route{
{Matchers: createAlertNameMatchers("alert4"), Routes: []*ualert.Route{
{Receiver: "notifier4", Matchers: createAlertNameMatchers("alert4"), Continue: true},
@@ -209,7 +213,8 @@ func TestDashAlertMigration(t *testing.T) {
int64(1): {
AlertmanagerConfig: ualert.PostableApiAlertingConfig{
Route: &ualert.Route{
- Receiver: "autogen-contact-point-default",
+ Receiver: "autogen-contact-point-default",
+ GroupByStr: []string{ngModels.FolderTitleLabel, model.AlertNameLabel},
},
Receivers: []*ualert.PostableApiReceiver{
{Name: "notifier1", GrafanaManagedReceivers: []*ualert.PostableGrafanaReceiver{{Name: "notifier1", Type: "email"}}},
@@ -229,7 +234,8 @@ func TestDashAlertMigration(t *testing.T) {
int64(1): {
AlertmanagerConfig: ualert.PostableApiAlertingConfig{
Route: &ualert.Route{
- Receiver: "notifier1",
+ Receiver: "notifier1",
+ GroupByStr: []string{ngModels.FolderTitleLabel, model.AlertNameLabel},
},
Receivers: []*ualert.PostableApiReceiver{
{Name: "notifier1", GrafanaManagedReceivers: []*ualert.PostableGrafanaReceiver{{Name: "notifier1", Type: "email"}}},
@@ -249,7 +255,8 @@ func TestDashAlertMigration(t *testing.T) {
int64(1): {
AlertmanagerConfig: ualert.PostableApiAlertingConfig{
Route: &ualert.Route{
- Receiver: "autogen-contact-point-default",
+ Receiver: "autogen-contact-point-default",
+ GroupByStr: []string{ngModels.FolderTitleLabel, model.AlertNameLabel},
},
Receivers: []*ualert.PostableApiReceiver{
{Name: "notifier1", GrafanaManagedReceivers: []*ualert.PostableGrafanaReceiver{{Name: "notifier1", Type: "email"}}},
@@ -272,7 +279,8 @@ func TestDashAlertMigration(t *testing.T) {
int64(1): {
AlertmanagerConfig: ualert.PostableApiAlertingConfig{
Route: &ualert.Route{
- Receiver: "autogen-contact-point-default",
+ Receiver: "autogen-contact-point-default",
+ GroupByStr: []string{ngModels.FolderTitleLabel, model.AlertNameLabel},
},
Receivers: []*ualert.PostableApiReceiver{
{Name: "notifier1", GrafanaManagedReceivers: []*ualert.PostableGrafanaReceiver{{Name: "notifier1", Type: "email"}}},
@@ -297,7 +305,8 @@ func TestDashAlertMigration(t *testing.T) {
int64(1): {
AlertmanagerConfig: ualert.PostableApiAlertingConfig{
Route: &ualert.Route{
- Receiver: "autogen-contact-point-default",
+ Receiver: "autogen-contact-point-default",
+ GroupByStr: []string{ngModels.FolderTitleLabel, model.AlertNameLabel},
},
Receivers: []*ualert.PostableApiReceiver{
{Name: "notifier1", GrafanaManagedReceivers: []*ualert.PostableGrafanaReceiver{{Name: "notifier1", Type: "email"}}},
@@ -322,7 +331,8 @@ func TestDashAlertMigration(t *testing.T) {
int64(1): {
AlertmanagerConfig: ualert.PostableApiAlertingConfig{
Route: &ualert.Route{
- Receiver: "autogen-contact-point-default",
+ Receiver: "autogen-contact-point-default",
+ GroupByStr: []string{ngModels.FolderTitleLabel, model.AlertNameLabel},
Routes: []*ualert.Route{
{Receiver: "notifier1", Matchers: createAlertNameMatchers("alert1")},
{Matchers: createAlertNameMatchers("alert2"), Routes: []*ualert.Route{
@@ -350,7 +360,8 @@ func TestDashAlertMigration(t *testing.T) {
int64(1): {
AlertmanagerConfig: ualert.PostableApiAlertingConfig{
Route: &ualert.Route{
- Receiver: "autogen-contact-point-default",
+ Receiver: "autogen-contact-point-default",
+ GroupByStr: []string{ngModels.FolderTitleLabel, model.AlertNameLabel},
},
Receivers: []*ualert.PostableApiReceiver{
{Name: "notifier1", GrafanaManagedReceivers: []*ualert.PostableGrafanaReceiver{{Name: "notifier1", Type: "email"}}},
@@ -372,7 +383,8 @@ func TestDashAlertMigration(t *testing.T) {
int64(1): {
AlertmanagerConfig: ualert.PostableApiAlertingConfig{
Route: &ualert.Route{
- Receiver: "autogen-contact-point-default",
+ Receiver: "autogen-contact-point-default",
+ GroupByStr: []string{ngModels.FolderTitleLabel, model.AlertNameLabel},
},
Receivers: []*ualert.PostableApiReceiver{
{Name: "notifier1", GrafanaManagedReceivers: []*ualert.PostableGrafanaReceiver{{Name: "notifier1", Type: "email"}}},
@@ -395,7 +407,8 @@ func TestDashAlertMigration(t *testing.T) {
int64(1): {
AlertmanagerConfig: ualert.PostableApiAlertingConfig{
Route: &ualert.Route{
- Receiver: "autogen-contact-point-default",
+ Receiver: "autogen-contact-point-default",
+ GroupByStr: []string{ngModels.FolderTitleLabel, model.AlertNameLabel},
Routes: []*ualert.Route{
{Receiver: "notifier1", Matchers: createAlertNameMatchers("alert1")},
},
diff --git a/pkg/services/sqlstore/playlist.go b/pkg/services/sqlstore/playlist.go
index 9fbc3b02bbb..269bf06183a 100644
--- a/pkg/services/sqlstore/playlist.go
+++ b/pkg/services/sqlstore/playlist.go
@@ -103,7 +103,10 @@ func (ss *SQLStore) GetPlaylist(ctx context.Context, query *models.GetPlaylistBy
return ss.WithDbSession(ctx, func(sess *DBSession) error {
playlist := models.Playlist{UID: query.UID, OrgId: query.OrgId}
- _, err := sess.Get(&playlist)
+ exists, err := sess.Get(&playlist)
+ if err != nil {
+ return err
+ }
+ if !exists {
+ return models.ErrPlaylistNotFound
+ }
query.Result = &playlist
return err
diff --git a/pkg/services/sqlstore/playlist_test.go b/pkg/services/sqlstore/playlist_test.go
index 762b3f93d11..a3605bdb429 100644
--- a/pkg/services/sqlstore/playlist_test.go
+++ b/pkg/services/sqlstore/playlist_test.go
@@ -25,6 +25,15 @@ func TestIntegrationPlaylistDataAccess(t *testing.T) {
require.NoError(t, err)
uid := cmd.Result.UID
+ t.Run("Can get playlist", func(t *testing.T) {
+ get := &models.GetPlaylistByUidQuery{UID: uid, OrgId: 1}
+ err = ss.GetPlaylist(context.Background(), get)
+ require.NoError(t, err)
+ require.NotNil(t, get.Result)
+ require.Equal(t, get.Result.Name, "NYC office")
+ require.Equal(t, get.Result.Interval, "10m")
+ })
+
t.Run("Can get playlist items", func(t *testing.T) {
get := &models.GetPlaylistItemsByUidQuery{PlaylistUID: uid, OrgId: 1}
err = ss.GetPlaylistItem(context.Background(), get)
@@ -49,11 +58,18 @@ func TestIntegrationPlaylistDataAccess(t *testing.T) {
getQuery := models.GetPlaylistByUidQuery{UID: uid, OrgId: 1}
err = ss.GetPlaylist(context.Background(), &getQuery)
- require.NoError(t, err)
- require.Equal(t, uid, getQuery.Result.UID, "playlist should've been removed")
+ require.Error(t, err)
+ require.ErrorIs(t, err, models.ErrPlaylistNotFound)
})
})
+ t.Run("Get playlist that doesn't exist", func(t *testing.T) {
+ get := &models.GetPlaylistByUidQuery{UID: "unknown", OrgId: 1}
+ err := ss.GetPlaylist(context.Background(), get)
+ require.Error(t, err)
+ require.ErrorIs(t, err, models.ErrPlaylistNotFound)
+ })
+
t.Run("Delete playlist that doesn't exist", func(t *testing.T) {
deleteQuery := models.DeletePlaylistCommand{UID: "654312", OrgId: 1}
err := ss.DeletePlaylist(context.Background(), &deleteQuery)
diff --git a/pkg/services/store/entity_events.go b/pkg/services/store/entity_events.go
index 1698b8ffc1f..1c4055908d4 100644
--- a/pkg/services/store/entity_events.go
+++ b/pkg/services/store/entity_events.go
@@ -26,6 +26,7 @@ const (
EntityTypeDashboard EntityType = "dashboard"
EntityTypeFolder EntityType = "folder"
EntityTypeImage EntityType = "image"
+ EntityTypeJSON EntityType = "json"
)
// CreateDatabaseEntityId creates entityId for entities stored in the existing SQL tables
diff --git a/pkg/services/store/http.go b/pkg/services/store/http.go
index a2fcb6db191..43f2f69c8c3 100644
--- a/pkg/services/store/http.go
+++ b/pkg/services/store/http.go
@@ -15,8 +15,6 @@ import (
"github.com/grafana/grafana/pkg/web"
)
-var errFileTooBig = response.Error(400, "Please limit file uploaded under 1MB", errors.New("file is too big"))
-
// HTTPStorageService passes raw HTTP requests to a well typed storage service
type HTTPStorageService interface {
List(c *models.ReqContext) response.Response
@@ -57,78 +55,91 @@ func UploadErrorToStatusCode(err error) int {
}
func (s *httpStorage) Upload(c *models.ReqContext) response.Response {
- // 32 MB is the default used by FormFile()
- if err := c.Req.ParseMultipartForm(32 << 20); err != nil {
- return response.Error(400, "error in parsing form", err)
+ type rspInfo struct {
+ Message string `json:"message,omitempty"`
+ Path string `json:"path,omitempty"`
+ Count int `json:"count,omitempty"`
+ Bytes int `json:"bytes,omitempty"`
+ Error bool `json:"err,omitempty"`
}
+ rsp := &rspInfo{Message: "uploaded"}
+
c.Req.Body = http.MaxBytesReader(c.Resp, c.Req.Body, MAX_UPLOAD_SIZE)
if err := c.Req.ParseMultipartForm(MAX_UPLOAD_SIZE); err != nil {
- msg := fmt.Sprintf("Please limit file uploaded under %s", util.ByteCountSI(MAX_UPLOAD_SIZE))
- return response.Error(400, msg, err)
- }
-
- files := c.Req.MultipartForm.File["file"]
- if len(files) != 1 {
- return response.JSON(400, map[string]interface{}{
- "message": "please upload files one at a time",
- "err": true,
- })
- }
-
- folder, ok := c.Req.MultipartForm.Value["folder"]
- if !ok || len(folder) != 1 {
- return response.JSON(400, map[string]interface{}{
- "message": "please specify the upload folder",
- "err": true,
- })
- }
-
- fileHeader := files[0]
- if fileHeader.Size > MAX_UPLOAD_SIZE {
- return errFileTooBig
- }
-
- // restrict file size based on file size
- // open each file to copy contents
- file, err := fileHeader.Open()
- if err != nil {
- return response.Error(500, "Internal Server Error", err)
- }
- err = file.Close()
- if err != nil {
- return response.Error(500, "Internal Server Error", err)
- }
- data, err := ioutil.ReadAll(file)
- if err != nil {
- return response.Error(500, "Internal Server Error", err)
- }
-
- if (len(data)) > MAX_UPLOAD_SIZE {
- return errFileTooBig
- }
-
- path := folder[0] + "/" + fileHeader.Filename
-
- mimeType := http.DetectContentType(data)
-
- err = s.store.Upload(c.Req.Context(), c.SignedInUser, &UploadRequest{
- Contents: data,
- MimeType: mimeType,
- EntityType: EntityTypeImage,
- Path: path,
- OverwriteExistingFile: true,
- })
+ rsp.Message = fmt.Sprintf("Please limit file uploaded under %s", util.ByteCountSI(MAX_UPLOAD_SIZE))
+ rsp.Error = true
+ return response.JSON(400, rsp)
+ }
+ message := getMultipartFormValue(c.Req, "message")
+ overwriteExistingFile := getMultipartFormValue(c.Req, "overwriteExistingFile") != "false" // overwrites unless explicitly set to "false"
+ folder := getMultipartFormValue(c.Req, "folder")
+
+ for k, fileHeaders := range c.Req.MultipartForm.File {
+ path := getMultipartFormValue(c.Req, k+".path") // match the path with a file
+ if len(fileHeaders) > 1 {
+ path = ""
+ }
+ if path == "" && folder == "" {
+ rsp.Message = "please specify the upload folder or full path"
+ rsp.Error = true
+ return response.JSON(400, rsp)
+ }
+
+ for _, fileHeader := range fileHeaders {
+ // open each file to copy its contents; the upload size is already
+ // capped by MaxBytesReader and ParseMultipartForm above
+ file, err := fileHeader.Open()
+ if err != nil {
+ return response.Error(500, "Internal Server Error", err)
+ }
+ data, err := ioutil.ReadAll(file)
+ if err != nil {
+ return response.Error(500, "Internal Server Error", err)
+ }
+ // close only after reading; closing first would fail the read for
+ // disk-backed multipart files
+ err = file.Close()
+ if err != nil {
+ return response.Error(500, "Internal Server Error", err)
+ }
+
+ if path == "" {
+ path = folder + "/" + fileHeader.Filename
+ }
+
+ entityType := EntityTypeJSON
+ mimeType := http.DetectContentType(data)
+ if strings.HasPrefix(mimeType, "image") {
+ entityType = EntityTypeImage
+ }
+
+ err = s.store.Upload(c.Req.Context(), c.SignedInUser, &UploadRequest{
+ Contents: data,
+ MimeType: mimeType,
+ EntityType: entityType,
+ Path: path,
+ OverwriteExistingFile: overwriteExistingFile,
+ Properties: map[string]string{
+ "message": message, // the commit/changelog entry
+ },
+ })
+
+ if err != nil {
+ return response.Error(UploadErrorToStatusCode(err), err.Error(), err)
+ }
+ rsp.Count++
+ rsp.Bytes += len(data)
+ rsp.Path = path
+ }
+ }
+
+ return response.JSON(200, rsp)
+}
- if err != nil {
- return response.Error(UploadErrorToStatusCode(err), err.Error(), err)
+func getMultipartFormValue(req *http.Request, key string) string {
+ v, ok := req.MultipartForm.Value[key]
+ if !ok || len(v) != 1 {
+ return ""
}
-
- return response.JSON(200, map[string]interface{}{
- "message": "Uploaded successfully",
- "path": path,
- "file": fileHeader.Filename,
- "err": true,
- })
+ return v[0]
}
func (s *httpStorage) Read(c *models.ReqContext) response.Response {
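A hedged client-side sketch of a request this handler accepts. The `/api/storage/upload` path is assumed for illustration; the field names (`file`, `folder`, `overwriteExistingFile`, `message`, and the optional `<field>.path` pairing) come from the handler above:

```go
package main

import (
	"bytes"
	"mime/multipart"
	"net/http"
)

// buildUploadRequest assembles the multipart body the handler parses: a
// "file" part plus the optional folder/overwriteExistingFile/message fields.
func buildUploadRequest(contents []byte) (*http.Request, error) {
	body := &bytes.Buffer{}
	w := multipart.NewWriter(body)

	_ = w.WriteField("folder", "resources")            // upload destination
	_ = w.WriteField("overwriteExistingFile", "false") // preserve existing files
	_ = w.WriteField("message", "initial upload")      // commit/changelog entry

	fw, err := w.CreateFormFile("file", "dashboard.json")
	if err != nil {
		return nil, err
	}
	if _, err := fw.Write(contents); err != nil {
		return nil, err
	}
	if err := w.Close(); err != nil {
		return nil, err
	}

	req, err := http.NewRequest(http.MethodPost, "/api/storage/upload", body)
	if err != nil {
		return nil, err
	}
	req.Header.Set("Content-Type", w.FormDataContentType())
	return req, nil
}
```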
diff --git a/pkg/services/store/service.go b/pkg/services/store/service.go
index e096e8c89af..3713843f11f 100644
--- a/pkg/services/store/service.go
+++ b/pkg/services/store/service.go
@@ -4,7 +4,8 @@ import (
"context"
"errors"
"fmt"
- "strings"
+ "os"
+ "path/filepath"
"github.com/grafana/grafana/pkg/infra/filestorage"
"github.com/grafana/grafana/pkg/infra/log"
@@ -25,9 +26,8 @@ var ErrValidationFailed = errors.New("request validation failed")
var ErrFileAlreadyExists = errors.New("file exists")
const RootPublicStatic = "public-static"
-const RootResources = "resources"
-const MAX_UPLOAD_SIZE = 3 * 1024 * 1024 // 3MB
+const MAX_UPLOAD_SIZE = 1 * 1024 * 1024 // 1MB
type DeleteFolderCmd struct {
Path string `json:"path"`
@@ -85,11 +85,26 @@ func ProvideService(sql *sqlstore.SQLStore, features featuremgmt.FeatureToggles,
setDescription("Access files from the static public files"),
}
+ // Development dashboards
+ if setting.Env != setting.Prod {
+ devenv := filepath.Join(cfg.StaticRootPath, "..", "devenv")
+ if _, err := os.Stat(devenv); !os.IsNotExist(err) {
+ // the devenv directory exists
+ s := newDiskStorage("devenv", "Development Environment", &StorageLocalDiskConfig{
+ Path: devenv,
+ Roots: []string{
+ "/dev-dashboards/",
+ },
+ }).setReadOnly(false).setDescription("Explore files within the developer environment directly")
+ globalRoots = append(globalRoots, s)
+ }
+ }
+
initializeOrgStorages := func(orgId int64) []storageRuntime {
storages := make([]storageRuntime, 0)
if features.IsEnabled(featuremgmt.FlagStorageLocalUpload) {
storages = append(storages,
- newSQLStorage(RootResources,
+ newSQLStorage("resources",
"Resources",
&StorageSQLConfig{orgId: orgId}, sql).
setBuiltin(true).
@@ -155,22 +170,16 @@ type UploadRequest struct {
OverwriteExistingFile bool
}
-func storageSupportsMutatingOperations(path string) bool {
- // TODO: this is temporary - make it rbac-driven
- return strings.HasPrefix(path, RootResources+"/") || path == RootResources
-}
-
func (s *standardStorageService) Upload(ctx context.Context, user *models.SignedInUser, req *UploadRequest) error {
- upload, _ := s.tree.getRoot(getOrgId(user), RootResources)
+ upload, storagePath := s.tree.getRoot(getOrgId(user), req.Path)
if upload == nil {
return ErrUploadFeatureDisabled
}
- if !storageSupportsMutatingOperations(req.Path) {
+ if upload.Meta().ReadOnly {
return ErrUnsupportedStorage
}
- storagePath := strings.TrimPrefix(req.Path, RootResources)
validationResult := s.validateUploadRequest(ctx, user, req, storagePath)
if !validationResult.ok {
grafanaStorageLogger.Warn("file upload validation failed", "filetype", req.MimeType, "path", req.Path, "reason", validationResult.reason)
@@ -186,7 +195,7 @@ func (s *standardStorageService) Upload(ctx context.Context, user *models.Signed
grafanaStorageLogger.Info("uploading a file", "filetype", req.MimeType, "path", req.Path)
if !req.OverwriteExistingFile {
- file, err := upload.Get(ctx, storagePath)
+ file, err := upload.Store().Get(ctx, storagePath)
if err != nil {
grafanaStorageLogger.Error("failed while checking file existence", "err", err, "path", req.Path)
return ErrUploadInternalError
@@ -197,7 +206,7 @@ func (s *standardStorageService) Upload(ctx context.Context, user *models.Signed
}
}
- if err := upload.Upsert(ctx, upsertCommand); err != nil {
+ if err := upload.Store().Upsert(ctx, upsertCommand); err != nil {
grafanaStorageLogger.Error("failed while uploading the file", "err", err, "path", req.Path)
return ErrUploadInternalError
}
@@ -206,34 +215,32 @@ func (s *standardStorageService) Upload(ctx context.Context, user *models.Signed
}
func (s *standardStorageService) DeleteFolder(ctx context.Context, user *models.SignedInUser, cmd *DeleteFolderCmd) error {
- resources, _ := s.tree.getRoot(getOrgId(user), RootResources)
- if resources == nil {
+ root, storagePath := s.tree.getRoot(getOrgId(user), cmd.Path)
+ if root == nil {
return fmt.Errorf("resources storage is not enabled")
}
- if !storageSupportsMutatingOperations(cmd.Path) {
+ if root.Meta().ReadOnly {
return ErrUnsupportedStorage
}
- storagePath := strings.TrimPrefix(cmd.Path, RootResources)
if storagePath == "" {
storagePath = filestorage.Delimiter
}
- return resources.DeleteFolder(ctx, storagePath, &filestorage.DeleteFolderOptions{Force: true})
+ return root.Store().DeleteFolder(ctx, storagePath, &filestorage.DeleteFolderOptions{Force: true})
}
func (s *standardStorageService) CreateFolder(ctx context.Context, user *models.SignedInUser, cmd *CreateFolderCmd) error {
- if !storageSupportsMutatingOperations(cmd.Path) {
- return ErrUnsupportedStorage
+ root, storagePath := s.tree.getRoot(getOrgId(user), cmd.Path)
+ if root == nil {
+ return fmt.Errorf("resources storage is not enabled")
}
- resources, _ := s.tree.getRoot(getOrgId(user), RootResources)
- if resources == nil {
- return fmt.Errorf("resources storage is not enabled")
+ if root.Meta().ReadOnly {
+ return ErrUnsupportedStorage
}
- storagePath := strings.TrimPrefix(cmd.Path, RootResources)
- err := resources.CreateFolder(ctx, storagePath)
+ err := root.Store().CreateFolder(ctx, storagePath)
if err != nil {
return err
}
@@ -241,17 +248,16 @@ func (s *standardStorageService) CreateFolder(ctx context.Context, user *models.
}
func (s *standardStorageService) Delete(ctx context.Context, user *models.SignedInUser, path string) error {
- if !storageSupportsMutatingOperations(path) {
- return ErrUnsupportedStorage
+ root, storagePath := s.tree.getRoot(getOrgId(user), path)
+ if root == nil {
+ return fmt.Errorf("resources storage is not enabled")
}
- resources, _ := s.tree.getRoot(getOrgId(user), RootResources)
- if resources == nil {
- return fmt.Errorf("resources storage is not enabled")
+ if root.Meta().ReadOnly {
+ return ErrUnsupportedStorage
}
- storagePath := strings.TrimPrefix(path, RootResources)
- err := resources.Delete(ctx, storagePath)
+ err := root.Store().Delete(ctx, storagePath)
if err != nil {
return err
}
diff --git a/pkg/services/store/tree.go b/pkg/services/store/tree.go
index 24f32377dbd..5748fe5adc3 100644
--- a/pkg/services/store/tree.go
+++ b/pkg/services/store/tree.go
@@ -11,7 +11,7 @@ import (
type nestedTree struct {
rootsByOrgId map[int64][]storageRuntime
- lookup map[int64]map[string]filestorage.FileStorage
+ lookup map[int64]map[string]storageRuntime
orgInitMutex sync.Mutex
initializeOrgStorages func(orgId int64) []storageRuntime
@@ -21,10 +21,10 @@ var (
_ storageTree = (*nestedTree)(nil)
)
-func asNameToFileStorageMap(storages []storageRuntime) map[string]filestorage.FileStorage {
- lookup := make(map[string]filestorage.FileStorage)
+func asNameToFileStorageMap(storages []storageRuntime) map[string]storageRuntime {
+ lookup := make(map[string]storageRuntime)
for _, storage := range storages {
- lookup[storage.Meta().Config.Prefix] = storage.Store()
+ lookup[storage.Meta().Config.Prefix] = storage
}
return lookup
}
@@ -33,7 +33,7 @@ func (t *nestedTree) init() {
t.orgInitMutex.Lock()
defer t.orgInitMutex.Unlock()
- t.lookup = make(map[int64]map[string]filestorage.FileStorage, len(t.rootsByOrgId))
+ t.lookup = make(map[int64]map[string]storageRuntime, len(t.rootsByOrgId))
for orgId, storages := range t.rootsByOrgId {
t.lookup[orgId] = asNameToFileStorageMap(storages)
@@ -50,7 +50,7 @@ func (t *nestedTree) assureOrgIsInitialized(orgId int64) {
}
}
-func (t *nestedTree) getRoot(orgId int64, path string) (filestorage.FileStorage, string) {
+func (t *nestedTree) getRoot(orgId int64, path string) (storageRuntime, string) {
t.assureOrgIsInitialized(orgId)
if path == "" {
@@ -82,7 +82,7 @@ func (t *nestedTree) GetFile(ctx context.Context, orgId int64, path string) (*fi
if root == nil {
return nil, nil // not found (or not ready)
}
- return root.Get(ctx, path)
+ return root.Store().Get(ctx, path)
}
func (t *nestedTree) ListFolder(ctx context.Context, orgId int64, path string) (*StorageListFrame, error) {
@@ -146,7 +146,7 @@ func (t *nestedTree) ListFolder(ctx context.Context, orgId int64, path string) (
return nil, nil // not found (or not ready)
}
- listResponse, err := root.List(ctx, path, nil, &filestorage.ListOptions{
+ listResponse, err := root.Store().List(ctx, path, nil, &filestorage.ListOptions{
Recursive: false,
WithFolders: true,
WithFiles: true,
diff --git a/pkg/services/store/validate.go b/pkg/services/store/validate.go
index 7ad7cf391bf..cc0c2f011dc 100644
--- a/pkg/services/store/validate.go
+++ b/pkg/services/store/validate.go
@@ -74,15 +74,15 @@ func (s *standardStorageService) validateUploadRequest(ctx context.Context, user
}
switch req.EntityType {
+ case EntityTypeJSON:
+ fallthrough
case EntityTypeFolder:
fallthrough
case EntityTypeDashboard:
// TODO: add proper validation
- var something interface{}
- if err := json.Unmarshal(req.Contents, &something); err != nil {
- return fail(err.Error())
+ if !json.Valid(req.Contents) {
+ return fail("invalid json")
}
-
return success()
case EntityTypeImage:
return s.validateImage(ctx, user, req)
diff --git a/pkg/setting/setting.go b/pkg/setting/setting.go
index 426907ac6ef..a67ca376306 100644
--- a/pkg/setting/setting.go
+++ b/pkg/setting/setting.go
@@ -1445,6 +1445,12 @@ func readAlertingSettings(iniFile *ini.File) error {
return nil
}
+// IsLegacyAlertingEnabled returns whether legacy alerting is enabled.
+// It is safe to use only after readAlertingSettings() and ReadUnifiedAlertingSettings() have been executed.
+func IsLegacyAlertingEnabled() bool {
+ return AlertingEnabled != nil && *AlertingEnabled
+}
+
func readSnapshotsSettings(cfg *Cfg, iniFile *ini.File) error {
snapshots := iniFile.Section("snapshots")
diff --git a/pkg/setting/setting_unified_alerting.go b/pkg/setting/setting_unified_alerting.go
index 5762760c38d..1d8fcbb3ed7 100644
--- a/pkg/setting/setting_unified_alerting.go
+++ b/pkg/setting/setting_unified_alerting.go
@@ -26,7 +26,8 @@ const (
alertmanagerDefaultConfiguration = `{
"alertmanager_config": {
"route": {
- "receiver": "grafana-default-email"
+ "receiver": "grafana-default-email",
+ "group_by": ["grafana_folder", "alertname"]
},
"receivers": [{
"name": "grafana-default-email",
@@ -81,6 +82,7 @@ type UnifiedAlertingSettings struct {
// DefaultRuleEvaluationInterval default interval between evaluations of a rule.
DefaultRuleEvaluationInterval time.Duration
Screenshots UnifiedAlertingScreenshotSettings
+ ReservedLabels UnifiedAlertingReservedLabelSettings
}
type UnifiedAlertingScreenshotSettings struct {
@@ -89,12 +91,22 @@ type UnifiedAlertingScreenshotSettings struct {
UploadExternalImageStorage bool
}
+type UnifiedAlertingReservedLabelSettings struct {
+ DisabledLabels map[string]struct{}
+}
+
// IsEnabled returns true if UnifiedAlertingSettings.Enabled is either nil or true.
// It hides the implementation details of the Enabled and simplifies its usage.
func (u *UnifiedAlertingSettings) IsEnabled() bool {
return u.Enabled == nil || *u.Enabled
}
+// IsReservedLabelDisabled returns true if UnifiedAlertingReservedLabelSettings.DisabledLabels contains the given reserved label.
+func (u *UnifiedAlertingReservedLabelSettings) IsReservedLabelDisabled(label string) bool {
+ _, ok := u.DisabledLabels[label]
+ return ok
+}
+
// readUnifiedAlertingEnabledSettings reads the settings for unified alerting.
// It returns a non-nil bool and a nil error when unified alerting is enabled either
// because it has been enabled in the settings or by default. It returns nil and
@@ -273,6 +285,15 @@ func (cfg *Cfg) ReadUnifiedAlertingSettings(iniFile *ini.File) error {
uaCfgScreenshots.UploadExternalImageStorage = screenshots.Key("upload_external_image_storage").MustBool(screenshotsDefaultUploadImageStorage)
uaCfg.Screenshots = uaCfgScreenshots
+ reservedLabels := iniFile.Section("unified_alerting.reserved_labels")
+ uaCfgReservedLabels := UnifiedAlertingReservedLabelSettings{
+ DisabledLabels: make(map[string]struct{}),
+ }
+ for _, label := range util.SplitString(reservedLabels.Key("disabled_labels").MustString("")) {
+ uaCfgReservedLabels.DisabledLabels[label] = struct{}{}
+ }
+ uaCfg.ReservedLabels = uaCfgReservedLabels
+
cfg.UnifiedAlerting = uaCfg
return nil
}
diff --git a/pkg/tests/api/alerting/api_alertmanager_test.go b/pkg/tests/api/alerting/api_alertmanager_test.go
index 72040631eba..9c34290fbf1 100644
--- a/pkg/tests/api/alerting/api_alertmanager_test.go
+++ b/pkg/tests/api/alerting/api_alertmanager_test.go
@@ -1832,7 +1832,8 @@ func TestAlertmanagerStatus(t *testing.T) {
},
"config": {
"route": {
- "receiver": "grafana-default-email"
+ "receiver": "grafana-default-email",
+ "group_by": ["grafana_folder", "alertname"]
},
"templates": null,
"receivers": [{
diff --git a/pkg/tests/api/alerting/testing.go b/pkg/tests/api/alerting/testing.go
index 83fe4b38d1c..a9e1883736b 100644
--- a/pkg/tests/api/alerting/testing.go
+++ b/pkg/tests/api/alerting/testing.go
@@ -25,7 +25,8 @@ const defaultAlertmanagerConfigJSON = `
"template_files": null,
"alertmanager_config": {
"route": {
- "receiver": "grafana-default-email"
+ "receiver": "grafana-default-email",
+ "group_by": ["grafana_folder", "alertname"]
},
"templates": null,
"receivers": [{
diff --git a/pkg/util/errutil/status.go b/pkg/util/errutil/status.go
index 379c23e54c3..a1d02f2eae1 100644
--- a/pkg/util/errutil/status.go
+++ b/pkg/util/errutil/status.go
@@ -110,6 +110,10 @@ func (s CoreStatus) LogLevel() LogLevel {
}
}
+func (s CoreStatus) String() string {
+ return string(s)
+}
+
// ProxyStatus implies that an error originated from the data source
// proxy.
type ProxyStatus CoreStatus
diff --git a/public/api-merged.json b/public/api-merged.json
index b3a8c4cd583..c83c8d61fb2 100644
--- a/public/api-merged.json
+++ b/public/api-merged.json
@@ -5806,6 +5806,231 @@
}
}
},
+ "/playlists": {
+ "get": {
+ "tags": ["playlists"],
+ "summary": "Get playlists.",
+ "operationId": "searchPlaylists",
+ "parameters": [
+ {
+ "type": "string",
+ "name": "query",
+ "in": "query"
+ },
+ {
+ "type": "integer",
+ "format": "int64",
+ "description": "in:limit",
+ "name": "limit",
+ "in": "query"
+ }
+ ],
+ "responses": {
+ "200": {
+ "$ref": "#/responses/searchPlaylistsResponse"
+ },
+ "500": {
+ "$ref": "#/responses/internalServerError"
+ }
+ }
+ },
+ "post": {
+ "tags": ["playlists"],
+ "summary": "Create playlist.",
+ "operationId": "createPlaylist",
+ "parameters": [
+ {
+ "name": "Body",
+ "in": "body",
+ "required": true,
+ "schema": {
+ "$ref": "#/definitions/CreatePlaylistCommand"
+ }
+ }
+ ],
+ "responses": {
+ "200": {
+ "$ref": "#/responses/createPlaylistResponse"
+ },
+ "401": {
+ "$ref": "#/responses/unauthorisedError"
+ },
+ "403": {
+ "$ref": "#/responses/forbiddenError"
+ },
+ "404": {
+ "$ref": "#/responses/notFoundError"
+ },
+ "500": {
+ "$ref": "#/responses/internalServerError"
+ }
+ }
+ }
+ },
+ "/playlists/{uid}": {
+ "get": {
+ "tags": ["playlists"],
+ "summary": "Get playlist by UID.",
+ "operationId": "getPlaylist",
+ "parameters": [
+ {
+ "type": "string",
+ "name": "uid",
+ "in": "path",
+ "required": true
+ }
+ ],
+ "responses": {
+ "200": {
+ "$ref": "#/responses/getPlaylistResponse"
+ },
+ "401": {
+ "$ref": "#/responses/unauthorisedError"
+ },
+ "403": {
+ "$ref": "#/responses/forbiddenError"
+ },
+ "404": {
+ "$ref": "#/responses/notFoundError"
+ },
+ "500": {
+ "$ref": "#/responses/internalServerError"
+ }
+ }
+ },
+ "put": {
+ "tags": ["playlists"],
+ "summary": "Update playlist.",
+ "operationId": "updatePlaylist",
+ "parameters": [
+ {
+ "name": "Body",
+ "in": "body",
+ "required": true,
+ "schema": {
+ "$ref": "#/definitions/UpdatePlaylistCommand"
+ }
+ },
+ {
+ "type": "string",
+ "name": "uid",
+ "in": "path",
+ "required": true
+ }
+ ],
+ "responses": {
+ "200": {
+ "$ref": "#/responses/updatePlaylistResponse"
+ },
+ "401": {
+ "$ref": "#/responses/unauthorisedError"
+ },
+ "403": {
+ "$ref": "#/responses/forbiddenError"
+ },
+ "404": {
+ "$ref": "#/responses/notFoundError"
+ },
+ "500": {
+ "$ref": "#/responses/internalServerError"
+ }
+ }
+ },
+ "delete": {
+ "tags": ["playlists"],
+ "summary": "Delete pllaylist.",
+ "operationId": "deletePlaylist",
+ "parameters": [
+ {
+ "type": "string",
+ "name": "uid",
+ "in": "path",
+ "required": true
+ }
+ ],
+ "responses": {
+ "200": {
+ "$ref": "#/responses/okResponse"
+ },
+ "401": {
+ "$ref": "#/responses/unauthorisedError"
+ },
+ "403": {
+ "$ref": "#/responses/forbiddenError"
+ },
+ "404": {
+ "$ref": "#/responses/notFoundError"
+ },
+ "500": {
+ "$ref": "#/responses/internalServerError"
+ }
+ }
+ }
+ },
+ "/playlists/{uid}/dashboards": {
+ "get": {
+ "tags": ["playlists"],
+ "summary": "Get playlist dashboards.",
+ "operationId": "getPlaylistDashboards",
+ "parameters": [
+ {
+ "type": "string",
+ "name": "uid",
+ "in": "path",
+ "required": true
+ }
+ ],
+ "responses": {
+ "200": {
+ "$ref": "#/responses/getPlaylistDashboardsResponse"
+ },
+ "401": {
+ "$ref": "#/responses/unauthorisedError"
+ },
+ "403": {
+ "$ref": "#/responses/forbiddenError"
+ },
+ "404": {
+ "$ref": "#/responses/notFoundError"
+ },
+ "500": {
+ "$ref": "#/responses/internalServerError"
+ }
+ }
+ }
+ },
+ "/playlists/{uid}/items": {
+ "get": {
+ "tags": ["playlists"],
+ "summary": "Get playlist items.",
+ "operationId": "getPlaylistItems",
+ "parameters": [
+ {
+ "type": "string",
+ "name": "uid",
+ "in": "path",
+ "required": true
+ }
+ ],
+ "responses": {
+ "200": {
+ "$ref": "#/responses/getPlaylistItemsResponse"
+ },
+ "401": {
+ "$ref": "#/responses/unauthorisedError"
+ },
+ "403": {
+ "$ref": "#/responses/forbiddenError"
+ },
+ "404": {
+ "$ref": "#/responses/notFoundError"
+ },
+ "500": {
+ "$ref": "#/responses/internalServerError"
+ }
+ }
+ }
+ },
"/query-history": {
"get": {
"description": "Returns a list of queries in the query history that matches the search criteria.\nQuery history search supports pagination. Use the `limit` parameter to control the maximum number of queries returned; the default limit is 100.\nYou can also use the `page` query parameter to fetch queries from any page other than the first one.",
@@ -8203,15 +8428,15 @@
"name": "Body",
"in": "body",
"schema": {
- "$ref": "#/definitions/AlertRule"
+ "$ref": "#/definitions/ProvisionedAlertRule"
}
}
],
"responses": {
"201": {
- "description": "AlertRule",
+ "description": "ProvisionedAlertRule",
"schema": {
- "$ref": "#/definitions/AlertRule"
+ "$ref": "#/definitions/ProvisionedAlertRule"
}
},
"400": {
@@ -8239,9 +8464,9 @@
],
"responses": {
"200": {
- "description": "AlertRule",
+ "description": "ProvisionedAlertRule",
"schema": {
- "$ref": "#/definitions/AlertRule"
+ "$ref": "#/definitions/ProvisionedAlertRule"
}
},
"404": {
@@ -8266,15 +8491,15 @@
"name": "Body",
"in": "body",
"schema": {
- "$ref": "#/definitions/AlertRule"
+ "$ref": "#/definitions/ProvisionedAlertRule"
}
}
],
"responses": {
"200": {
- "description": "AlertRule",
+ "description": "ProvisionedAlertRule",
"schema": {
- "$ref": "#/definitions/AlertRule"
+ "$ref": "#/definitions/ProvisionedAlertRule"
}
},
"400": {
@@ -8408,6 +8633,36 @@
}
},
"/v1/provisioning/folder/{FolderUID}/rule-groups/{Group}": {
+ "get": {
+ "tags": ["provisioning"],
+ "summary": "Get a rule group.",
+ "operationId": "RouteGetAlertRuleGroup",
+ "parameters": [
+ {
+ "type": "string",
+ "name": "FolderUID",
+ "in": "path",
+ "required": true
+ },
+ {
+ "type": "string",
+ "name": "Group",
+ "in": "path",
+ "required": true
+ }
+ ],
+ "responses": {
+ "200": {
+ "description": "AlertRuleGroup",
+ "schema": {
+ "$ref": "#/definitions/AlertRuleGroup"
+ }
+ },
+ "404": {
+ "description": " Not found."
+ }
+ }
+ },
"put": {
"consumes": ["application/json"],
"tags": ["provisioning"],
@@ -8430,15 +8685,15 @@
"name": "Body",
"in": "body",
"schema": {
- "$ref": "#/definitions/AlertRuleGroup"
+ "$ref": "#/definitions/AlertRuleGroupMetadata"
}
}
],
"responses": {
"200": {
- "description": "AlertRuleGroup",
+ "description": "AlertRuleGroupMetadata",
"schema": {
- "$ref": "#/definitions/AlertRuleGroup"
+ "$ref": "#/definitions/AlertRuleGroupMetadata"
}
},
"400": {
@@ -9321,137 +9576,107 @@
},
"AlertRule": {
"type": "object",
- "required": [
- "orgID",
- "folderUID",
- "ruleGroup",
- "title",
- "condition",
- "data",
- "noDataState",
- "execErrState",
- "for"
- ],
+ "title": "AlertRule is the model for alert rules in unified alerting.",
"properties": {
- "annotations": {
+ "Annotations": {
"type": "object",
"additionalProperties": {
"type": "string"
- },
- "example": {
- "runbook_url": "https://supercoolrunbook.com/page/13"
}
},
- "condition": {
- "type": "string",
- "example": "A"
+ "Condition": {
+ "type": "string"
},
- "data": {
+ "DashboardUID": {
+ "type": "string"
+ },
+ "Data": {
"type": "array",
"items": {
"$ref": "#/definitions/AlertQuery"
- },
- "example": [
- {
- "datasourceUid": "-100",
- "model": {
- "conditions": [
- {
- "evaluator": {
- "params": [0, 0],
- "type": "gt"
- },
- "operator": {
- "type": "and"
- },
- "query": {
- "params": null
- },
- "reducer": {
- "params": null,
- "type": "avg"
- },
- "type": "query"
- }
- ],
- "datasource": {
- "type": "__expr__",
- "uid": "__expr__"
- },
- "expression": "1 == 1",
- "hide": false,
- "intervalMs": 1000,
- "maxDataPoints": 43200,
- "refId": "A",
- "type": "math"
- },
- "queryType": "",
- "refId": "A",
- "relativeTimeRange": {
- "from": 0,
- "to": 0
- }
- }
- ]
+ }
},
- "execErrState": {
+ "ExecErrState": {
"type": "string",
"enum": ["Alerting", "Error", "OK"]
},
- "folderUID": {
- "type": "string",
- "example": "project_x"
- },
- "for": {
+ "For": {
"$ref": "#/definitions/Duration"
},
- "id": {
+ "ID": {
"type": "integer",
"format": "int64"
},
- "labels": {
+ "IntervalSeconds": {
+ "type": "integer",
+ "format": "int64"
+ },
+ "Labels": {
"type": "object",
"additionalProperties": {
"type": "string"
- },
- "example": {
- "team": "sre-team-1"
}
},
- "noDataState": {
+ "NamespaceUID": {
+ "type": "string"
+ },
+ "NoDataState": {
"type": "string",
"enum": ["Alerting", "NoData", "OK"]
},
- "orgID": {
+ "OrgID": {
"type": "integer",
"format": "int64"
},
- "provenance": {
- "$ref": "#/definitions/Provenance"
+ "PanelID": {
+ "type": "integer",
+ "format": "int64"
},
- "ruleGroup": {
- "type": "string",
- "maxLength": 190,
- "minLength": 1,
- "example": "eval_group_1"
+ "RuleGroup": {
+ "type": "string"
},
- "title": {
- "type": "string",
- "maxLength": 190,
- "minLength": 1,
- "example": "Always firing"
+ "RuleGroupIndex": {
+ "type": "integer",
+ "format": "int64"
},
- "uid": {
+ "Title": {
"type": "string"
},
- "updated": {
+ "UID": {
+ "type": "string"
+ },
+ "Updated": {
"type": "string",
- "format": "date-time",
- "readOnly": true
+ "format": "date-time"
+ },
+ "Version": {
+ "type": "integer",
+ "format": "int64"
}
}
},
"AlertRuleGroup": {
+ "type": "object",
+ "properties": {
+ "folderUid": {
+ "type": "string"
+ },
+ "interval": {
+ "type": "integer",
+ "format": "int64"
+ },
+ "rules": {
+ "type": "array",
+ "items": {
+ "$ref": "#/definitions/AlertRule"
+ }
+ },
+ "title": {
+ "type": "string"
+ }
+ }
+ },
+ "AlertRuleGroupMetadata": {
"type": "object",
"properties": {
"interval": {
@@ -10040,6 +10265,26 @@
}
}
},
+ "CreatePlaylistCommand": {
+ "type": "object",
+ "properties": {
+ "Result": {
+ "$ref": "#/definitions/Playlist"
+ },
+ "interval": {
+ "type": "string"
+ },
+ "items": {
+ "type": "array",
+ "items": {
+ "$ref": "#/definitions/PlaylistItemDTO"
+ }
+ },
+ "name": {
+ "type": "string"
+ }
+ }
+ },
"CreateQueryInQueryHistoryCommand": {
"description": "CreateQueryInQueryHistoryCommand is the command for adding query history",
"type": "object",
@@ -12377,6 +12622,9 @@
"type": "string",
"example": "now-1h"
},
+ "publicDashboardAccessToken": {
+ "type": "string"
+ },
"queries": {
"description": "queries.refId – Specifies an identifier of the query. Is optional and default to “A”.\nqueries.datasourceId – Specifies the data source to be queried. Each query in the request must have an unique datasourceId.\nqueries.maxDataPoints - Species maximum amount of data points that dashboard panel can render. Is optional and default to 100.\nqueries.intervalMs - Specifies the time interval in milliseconds of time series. Is optional and defaults to 1000.",
"type": "array",
@@ -12954,52 +13202,158 @@
}
}
},
- "PauseAlertCommand": {
+ "PauseAlertCommand": {
+ "type": "object",
+ "properties": {
+ "alertId": {
+ "type": "integer",
+ "format": "int64"
+ },
+ "paused": {
+ "type": "boolean"
+ }
+ }
+ },
+ "PauseAllAlertsCommand": {
+ "type": "object",
+ "properties": {
+ "paused": {
+ "type": "boolean"
+ }
+ }
+ },
+ "Permission": {
+ "type": "object",
+ "title": "Permission is the model for access control permissions.",
+ "properties": {
+ "action": {
+ "type": "string"
+ },
+ "created": {
+ "type": "string",
+ "format": "date-time"
+ },
+ "scope": {
+ "type": "string"
+ },
+ "updated": {
+ "type": "string",
+ "format": "date-time"
+ }
+ }
+ },
+ "PermissionDenied": {
+ "type": "object"
+ },
+ "PermissionType": {
+ "type": "integer",
+ "format": "int64"
+ },
+ "Playlist": {
+ "description": "Playlist model",
+ "type": "object",
+ "properties": {
+ "id": {
+ "type": "integer",
+ "format": "int64"
+ },
+ "interval": {
+ "type": "string"
+ },
+ "name": {
+ "type": "string"
+ },
+ "uid": {
+ "type": "string"
+ }
+ }
+ },
+ "PlaylistDTO": {
"type": "object",
"properties": {
- "alertId": {
+ "id": {
"type": "integer",
"format": "int64"
},
- "paused": {
- "type": "boolean"
+ "interval": {
+ "type": "string"
+ },
+ "items": {
+ "type": "array",
+ "items": {
+ "$ref": "#/definitions/PlaylistItemDTO"
+ }
+ },
+ "name": {
+ "type": "string"
+ },
+ "uid": {
+ "type": "string"
}
}
},
- "PauseAllAlertsCommand": {
+ "PlaylistDashboard": {
"type": "object",
"properties": {
- "paused": {
- "type": "boolean"
+ "id": {
+ "type": "integer",
+ "format": "int64"
+ },
+ "order": {
+ "type": "integer",
+ "format": "int64"
+ },
+ "slug": {
+ "type": "string"
+ },
+ "title": {
+ "type": "string"
+ },
+ "uri": {
+ "type": "string"
+ },
+ "url": {
+ "type": "string"
}
}
},
- "Permission": {
+ "PlaylistDashboardsSlice": {
+ "type": "array",
+ "items": {
+ "$ref": "#/definitions/PlaylistDashboard"
+ }
+ },
+ "PlaylistItemDTO": {
"type": "object",
- "title": "Permission is the model for access control permissions.",
"properties": {
- "action": {
- "type": "string"
+ "id": {
+ "type": "integer",
+ "format": "int64"
},
- "created": {
- "type": "string",
- "format": "date-time"
+ "order": {
+ "type": "integer",
+ "format": "int64"
},
- "scope": {
+ "playlistid": {
+ "type": "integer",
+ "format": "int64"
+ },
+ "title": {
"type": "string"
},
- "updated": {
- "type": "string",
- "format": "date-time"
+ "type": {
+ "type": "string"
+ },
+ "value": {
+ "type": "string"
}
}
},
- "PermissionDenied": {
- "type": "object"
- },
- "PermissionType": {
- "type": "integer",
- "format": "int64"
+ "Playlists": {
+ "type": "array",
+ "items": {
+ "$ref": "#/definitions/Playlist"
+ }
},
"Point": {
"type": "object",
@@ -13364,6 +13718,138 @@
"Provenance": {
"type": "string"
},
+ "ProvisionedAlertRule": {
+ "type": "object",
+ "required": [
+ "orgID",
+ "folderUID",
+ "ruleGroup",
+ "title",
+ "condition",
+ "data",
+ "noDataState",
+ "execErrState",
+ "for"
+ ],
+ "properties": {
+ "annotations": {
+ "type": "object",
+ "additionalProperties": {
+ "type": "string"
+ },
+ "example": {
+ "runbook_url": "https://supercoolrunbook.com/page/13"
+ }
+ },
+ "condition": {
+ "type": "string",
+ "example": "A"
+ },
+ "data": {
+ "type": "array",
+ "items": {
+ "$ref": "#/definitions/AlertQuery"
+ },
+ "example": [
+ {
+ "datasourceUid": "-100",
+ "model": {
+ "conditions": [
+ {
+ "evaluator": {
+ "params": [0, 0],
+ "type": "gt"
+ },
+ "operator": {
+ "type": "and"
+ },
+ "query": {
+ "params": null
+ },
+ "reducer": {
+ "params": null,
+ "type": "avg"
+ },
+ "type": "query"
+ }
+ ],
+ "datasource": {
+ "type": "__expr__",
+ "uid": "__expr__"
+ },
+ "expression": "1 == 1",
+ "hide": false,
+ "intervalMs": 1000,
+ "maxDataPoints": 43200,
+ "refId": "A",
+ "type": "math"
+ },
+ "queryType": "",
+ "refId": "A",
+ "relativeTimeRange": {
+ "from": 0,
+ "to": 0
+ }
+ }
+ ]
+ },
+ "execErrState": {
+ "type": "string",
+ "enum": ["Alerting", "Error", "OK"]
+ },
+ "folderUID": {
+ "type": "string",
+ "example": "project_x"
+ },
+ "for": {
+ "$ref": "#/definitions/Duration"
+ },
+ "id": {
+ "type": "integer",
+ "format": "int64"
+ },
+ "labels": {
+ "type": "object",
+ "additionalProperties": {
+ "type": "string"
+ },
+ "example": {
+ "team": "sre-team-1"
+ }
+ },
+ "noDataState": {
+ "type": "string",
+ "enum": ["Alerting", "NoData", "OK"]
+ },
+ "orgID": {
+ "type": "integer",
+ "format": "int64"
+ },
+ "provenance": {
+ "$ref": "#/definitions/Provenance"
+ },
+ "ruleGroup": {
+ "type": "string",
+ "maxLength": 190,
+ "minLength": 1,
+ "example": "eval_group_1"
+ },
+ "title": {
+ "type": "string",
+ "maxLength": 190,
+ "minLength": 1,
+ "example": "Always firing"
+ },
+ "uid": {
+ "type": "string"
+ },
+ "updated": {
+ "type": "string",
+ "format": "date-time",
+ "readOnly": true
+ }
+ }
+ },
"PushoverConfig": {
"type": "object",
"properties": {
@@ -15010,8 +15496,9 @@
"type": "string"
},
"URL": {
+ "description": "The general form represented is:\n\n[scheme:][//[userinfo@]host][/]path[?query][#fragment]\n\nURLs that do not start with a slash after the scheme are interpreted as:\n\nscheme:opaque[?query][#fragment]\n\nNote that the Path field is stored in decoded form: /%47%6f%2f becomes /Go/.\nA consequence is that it is impossible to tell which slashes in the Path were\nslashes in the raw URL and which were %2f. This distinction is rarely important,\nbut when it is, the code should use RawPath, an optional field which only gets\nset if the default encoding is different from Path.\n\nURL's String method uses the EscapedPath method to obtain the path. See the\nEscapedPath method for more details.",
"type": "object",
- "title": "URL is a custom URL type that allows validation at configuration load time.",
+ "title": "A URL represents a parsed URL (technically, a URI reference).",
"properties": {
"ForceQuery": {
"type": "boolean"
@@ -15278,6 +15765,29 @@
}
}
},
+ "UpdatePlaylistCommand": {
+ "type": "object",
+ "properties": {
+ "Result": {
+ "$ref": "#/definitions/PlaylistDTO"
+ },
+ "interval": {
+ "type": "string"
+ },
+ "items": {
+ "type": "array",
+ "items": {
+ "$ref": "#/definitions/PlaylistItemDTO"
+ }
+ },
+ "name": {
+ "type": "string"
+ },
+ "uid": {
+ "type": "string"
+ }
+ }
+ },
"UpdatePrefsCmd": {
"type": "object",
"properties": {
@@ -15788,7 +16298,6 @@
}
},
"alertGroup": {
- "description": "AlertGroup alert group",
"type": "object",
"required": ["alerts", "labels", "receiver"],
"properties": {
@@ -15945,13 +16454,13 @@
}
},
"gettableAlerts": {
+ "description": "GettableAlerts gettable alerts",
"type": "array",
"items": {
"$ref": "#/definitions/gettableAlert"
}
},
"gettableSilence": {
- "description": "GettableSilence gettable silence",
"type": "object",
"required": ["comment", "createdBy", "endsAt", "matchers", "startsAt", "id", "status", "updatedAt"],
"properties": {
@@ -16125,6 +16634,7 @@
}
},
"receiver": {
+ "description": "Receiver receiver",
"type": "object",
"required": ["name"],
"properties": {
@@ -16325,6 +16835,12 @@
}
}
},
+ "createPlaylistResponse": {
+ "description": "",
+ "schema": {
+ "$ref": "#/definitions/Playlist"
+ }
+ },
"createReportResponse": {
"description": "",
"schema": {
@@ -16745,6 +17261,27 @@
"$ref": "#/definitions/AddPermissionDTO"
}
},
+ "getPlaylistDashboardsResponse": {
+ "description": "",
+ "schema": {
+ "$ref": "#/definitions/PlaylistDashboardsSlice"
+ }
+ },
+ "getPlaylistItemsResponse": {
+ "description": "",
+ "schema": {
+ "type": "array",
+ "items": {
+ "$ref": "#/definitions/PlaylistItemDTO"
+ }
+ }
+ },
+ "getPlaylistResponse": {
+ "description": "",
+ "schema": {
+ "$ref": "#/definitions/PlaylistDTO"
+ }
+ },
"getPreferencesResponse": {
"description": "",
"schema": {
@@ -17098,6 +17635,12 @@
}
}
},
+ "searchPlaylistsResponse": {
+ "description": "",
+ "schema": {
+ "$ref": "#/definitions/Playlists"
+ }
+ },
"searchResponse": {
"description": "",
"schema": {
@@ -17163,6 +17706,12 @@
"$ref": "#/definitions/ErrorResponseBody"
}
},
+ "updatePlaylistResponse": {
+ "description": "",
+ "schema": {
+ "$ref": "#/definitions/PlaylistDTO"
+ }
+ },
"userResponse": {
"description": "",
"schema": {
diff --git a/public/api-spec.json b/public/api-spec.json
index 6166c6c37ab..b438b6d5e39 100644
--- a/public/api-spec.json
+++ b/public/api-spec.json
@@ -5806,6 +5806,231 @@
}
}
},
+ "/playlists": {
+ "get": {
+ "tags": ["playlists"],
+ "summary": "Get playlists.",
+ "operationId": "searchPlaylists",
+ "parameters": [
+ {
+ "type": "string",
+ "name": "query",
+ "in": "query"
+ },
+ {
+ "type": "integer",
+ "format": "int64",
+ "description": "in:limit",
+ "name": "limit",
+ "in": "query"
+ }
+ ],
+ "responses": {
+ "200": {
+ "$ref": "#/responses/searchPlaylistsResponse"
+ },
+ "500": {
+ "$ref": "#/responses/internalServerError"
+ }
+ }
+ },
+ "post": {
+ "tags": ["playlists"],
+ "summary": "Create playlist.",
+ "operationId": "createPlaylist",
+ "parameters": [
+ {
+ "name": "Body",
+ "in": "body",
+ "required": true,
+ "schema": {
+ "$ref": "#/definitions/CreatePlaylistCommand"
+ }
+ }
+ ],
+ "responses": {
+ "200": {
+ "$ref": "#/responses/createPlaylistResponse"
+ },
+ "401": {
+ "$ref": "#/responses/unauthorisedError"
+ },
+ "403": {
+ "$ref": "#/responses/forbiddenError"
+ },
+ "404": {
+ "$ref": "#/responses/notFoundError"
+ },
+ "500": {
+ "$ref": "#/responses/internalServerError"
+ }
+ }
+ }
+ },
+ "/playlists/{uid}": {
+ "get": {
+ "tags": ["playlists"],
+ "summary": "Get playlist by UID.",
+ "operationId": "getPlaylist",
+ "parameters": [
+ {
+ "type": "string",
+ "name": "uid",
+ "in": "path",
+ "required": true
+ }
+ ],
+ "responses": {
+ "200": {
+ "$ref": "#/responses/getPlaylistResponse"
+ },
+ "401": {
+ "$ref": "#/responses/unauthorisedError"
+ },
+ "403": {
+ "$ref": "#/responses/forbiddenError"
+ },
+ "404": {
+ "$ref": "#/responses/notFoundError"
+ },
+ "500": {
+ "$ref": "#/responses/internalServerError"
+ }
+ }
+ },
+ "put": {
+ "tags": ["playlists"],
+ "summary": "Update playlist.",
+ "operationId": "updatePlaylist",
+ "parameters": [
+ {
+ "name": "Body",
+ "in": "body",
+ "required": true,
+ "schema": {
+ "$ref": "#/definitions/UpdatePlaylistCommand"
+ }
+ },
+ {
+ "type": "string",
+ "name": "uid",
+ "in": "path",
+ "required": true
+ }
+ ],
+ "responses": {
+ "200": {
+ "$ref": "#/responses/updatePlaylistResponse"
+ },
+ "401": {
+ "$ref": "#/responses/unauthorisedError"
+ },
+ "403": {
+ "$ref": "#/responses/forbiddenError"
+ },
+ "404": {
+ "$ref": "#/responses/notFoundError"
+ },
+ "500": {
+ "$ref": "#/responses/internalServerError"
+ }
+ }
+ },
+ "delete": {
+ "tags": ["playlists"],
+ "summary": "Delete pllaylist.",
+ "operationId": "deletePlaylist",
+ "parameters": [
+ {
+ "type": "string",
+ "name": "uid",
+ "in": "path",
+ "required": true
+ }
+ ],
+ "responses": {
+ "200": {
+ "$ref": "#/responses/okResponse"
+ },
+ "401": {
+ "$ref": "#/responses/unauthorisedError"
+ },
+ "403": {
+ "$ref": "#/responses/forbiddenError"
+ },
+ "404": {
+ "$ref": "#/responses/notFoundError"
+ },
+ "500": {
+ "$ref": "#/responses/internalServerError"
+ }
+ }
+ }
+ },
+ "/playlists/{uid}/dashboards": {
+ "get": {
+ "tags": ["playlists"],
+ "summary": "Get playlist dashboards.",
+ "operationId": "getPlaylistDashboards",
+ "parameters": [
+ {
+ "type": "string",
+ "name": "uid",
+ "in": "path",
+ "required": true
+ }
+ ],
+ "responses": {
+ "200": {
+ "$ref": "#/responses/getPlaylistDashboardsResponse"
+ },
+ "401": {
+ "$ref": "#/responses/unauthorisedError"
+ },
+ "403": {
+ "$ref": "#/responses/forbiddenError"
+ },
+ "404": {
+ "$ref": "#/responses/notFoundError"
+ },
+ "500": {
+ "$ref": "#/responses/internalServerError"
+ }
+ }
+ }
+ },
+ "/playlists/{uid}/items": {
+ "get": {
+ "tags": ["playlists"],
+ "summary": "Get playlist items.",
+ "operationId": "getPlaylistItems",
+ "parameters": [
+ {
+ "type": "string",
+ "name": "uid",
+ "in": "path",
+ "required": true
+ }
+ ],
+ "responses": {
+ "200": {
+ "$ref": "#/responses/getPlaylistItemsResponse"
+ },
+ "401": {
+ "$ref": "#/responses/unauthorisedError"
+ },
+ "403": {
+ "$ref": "#/responses/forbiddenError"
+ },
+ "404": {
+ "$ref": "#/responses/notFoundError"
+ },
+ "500": {
+ "$ref": "#/responses/internalServerError"
+ }
+ }
+ }
+ },
"/query-history": {
"get": {
"description": "Returns a list of queries in the query history that matches the search criteria.\nQuery history search supports pagination. Use the `limit` parameter to control the maximum number of queries returned; the default limit is 100.\nYou can also use the `page` query parameter to fetch queries from any page other than the first one.",
@@ -9170,6 +9395,26 @@
}
}
},
+ "CreatePlaylistCommand": {
+ "type": "object",
+ "properties": {
+ "Result": {
+ "$ref": "#/definitions/Playlist"
+ },
+ "interval": {
+ "type": "string"
+ },
+ "items": {
+ "type": "array",
+ "items": {
+ "$ref": "#/definitions/PlaylistItemDTO"
+ }
+ },
+ "name": {
+ "type": "string"
+ }
+ }
+ },
"CreateQueryInQueryHistoryCommand": {
"description": "CreateQueryInQueryHistoryCommand is the command for adding query history",
"type": "object",
@@ -10707,6 +10952,9 @@
"type": "string",
"example": "now-1h"
},
+ "publicDashboardAccessToken": {
+ "type": "string"
+ },
"queries": {
"description": "queries.refId – Specifies an identifier of the query. Is optional and default to “A”.\nqueries.datasourceId – Specifies the data source to be queried. Each query in the request must have an unique datasourceId.\nqueries.maxDataPoints - Species maximum amount of data points that dashboard panel can render. Is optional and default to 100.\nqueries.intervalMs - Specifies the time interval in milliseconds of time series. Is optional and defaults to 1000.",
"type": "array",
@@ -11066,6 +11314,112 @@
"type": "integer",
"format": "int64"
},
+ "Playlist": {
+ "description": "Playlist model",
+ "type": "object",
+ "properties": {
+ "id": {
+ "type": "integer",
+ "format": "int64"
+ },
+ "interval": {
+ "type": "string"
+ },
+ "name": {
+ "type": "string"
+ },
+ "uid": {
+ "type": "string"
+ }
+ }
+ },
+ "PlaylistDTO": {
+ "type": "object",
+ "properties": {
+ "id": {
+ "type": "integer",
+ "format": "int64"
+ },
+ "interval": {
+ "type": "string"
+ },
+ "items": {
+ "type": "array",
+ "items": {
+ "$ref": "#/definitions/PlaylistItemDTO"
+ }
+ },
+ "name": {
+ "type": "string"
+ },
+ "uid": {
+ "type": "string"
+ }
+ }
+ },
+ "PlaylistDashboard": {
+ "type": "object",
+ "properties": {
+ "id": {
+ "type": "integer",
+ "format": "int64"
+ },
+ "order": {
+ "type": "integer",
+ "format": "int64"
+ },
+ "slug": {
+ "type": "string"
+ },
+ "title": {
+ "type": "string"
+ },
+ "uri": {
+ "type": "string"
+ },
+ "url": {
+ "type": "string"
+ }
+ }
+ },
+ "PlaylistDashboardsSlice": {
+ "type": "array",
+ "items": {
+ "$ref": "#/definitions/PlaylistDashboard"
+ }
+ },
+ "PlaylistItemDTO": {
+ "type": "object",
+ "properties": {
+ "id": {
+ "type": "integer",
+ "format": "int64"
+ },
+ "order": {
+ "type": "integer",
+ "format": "int64"
+ },
+ "playlistid": {
+ "type": "integer",
+ "format": "int64"
+ },
+ "title": {
+ "type": "string"
+ },
+ "type": {
+ "type": "string"
+ },
+ "value": {
+ "type": "string"
+ }
+ }
+ },
+ "Playlists": {
+ "type": "array",
+ "items": {
+ "$ref": "#/definitions/Playlist"
+ }
+ },
"PostAnnotationsCmd": {
"type": "object",
"properties": {
@@ -12319,6 +12673,29 @@
}
}
},
+ "UpdatePlaylistCommand": {
+ "type": "object",
+ "properties": {
+ "Result": {
+ "$ref": "#/definitions/PlaylistDTO"
+ },
+ "interval": {
+ "type": "string"
+ },
+ "items": {
+ "type": "array",
+ "items": {
+ "$ref": "#/definitions/PlaylistItemDTO"
+ }
+ },
+ "name": {
+ "type": "string"
+ },
+ "uid": {
+ "type": "string"
+ }
+ }
+ },
"UpdatePrefsCmd": {
"type": "object",
"properties": {
@@ -12784,6 +13161,12 @@
}
}
},
+ "createPlaylistResponse": {
+ "description": "",
+ "schema": {
+ "$ref": "#/definitions/Playlist"
+ }
+ },
"createReportResponse": {
"description": "",
"schema": {
@@ -13204,6 +13587,27 @@
"$ref": "#/definitions/AddPermissionDTO"
}
},
+ "getPlaylistDashboardsResponse": {
+ "description": "",
+ "schema": {
+ "$ref": "#/definitions/PlaylistDashboardsSlice"
+ }
+ },
+ "getPlaylistItemsResponse": {
+ "description": "",
+ "schema": {
+ "type": "array",
+ "items": {
+ "$ref": "#/definitions/PlaylistItemDTO"
+ }
+ }
+ },
+ "getPlaylistResponse": {
+ "description": "",
+ "schema": {
+ "$ref": "#/definitions/PlaylistDTO"
+ }
+ },
"getPreferencesResponse": {
"description": "",
"schema": {
@@ -13557,6 +13961,12 @@
}
}
},
+ "searchPlaylistsResponse": {
+ "description": "",
+ "schema": {
+ "$ref": "#/definitions/Playlists"
+ }
+ },
"searchResponse": {
"description": "",
"schema": {
@@ -13622,6 +14032,12 @@
"$ref": "#/definitions/ErrorResponseBody"
}
},
+ "updatePlaylistResponse": {
+ "description": "",
+ "schema": {
+ "$ref": "#/definitions/PlaylistDTO"
+ }
+ },
"userResponse": {
"description": "",
"schema": {
diff --git a/public/app/core/components/NavBar/navBarItem-translations.ts b/public/app/core/components/NavBar/navBarItem-translations.ts
index 3d3ab5f31e4..7f517bf3f47 100644
--- a/public/app/core/components/NavBar/navBarItem-translations.ts
+++ b/public/app/core/components/NavBar/navBarItem-translations.ts
@@ -54,8 +54,8 @@ const TRANSLATED_MENU_ITEMS: Record = {
help: defineMessage({ id: 'nav.help', message: 'Help' }),
- 'profile-settings': defineMessage({ id: 'nav.profile-settings', message: 'Preferences' }),
- 'change-password': defineMessage({ id: 'nav.change-password', message: 'Change password' }),
+ 'profile-settings': defineMessage({ id: 'nav.profile/settings', message: 'Preferences' }),
+ 'change-password': defineMessage({ id: 'nav.profile/password', message: 'Change password' }),
'sign-out': defineMessage({ id: 'nav.sign-out', message: 'Sign out' }),
};
diff --git a/public/app/core/components/NavBar/utils.test.ts b/public/app/core/components/NavBar/utils.test.ts
index b062a199130..5ff932646a0 100644
--- a/public/app/core/components/NavBar/utils.test.ts
+++ b/public/app/core/components/NavBar/utils.test.ts
@@ -1,6 +1,6 @@
import { Location } from 'history';
-import { NavModelItem } from '@grafana/data';
+import { GrafanaConfig, locationUtil, NavModelItem } from '@grafana/data';
import { ContextSrv, setContextSrv } from 'app/core/services/context_srv';
import { updateConfig } from '../../config';
@@ -166,6 +166,10 @@ describe('getActiveItem', () => {
text: 'Item with query param',
url: '/itemWithQueryParam?foo=bar',
},
+ {
+ text: 'Item after subpath',
+ url: '/subUrl/itemAfterSubpath',
+ },
{
text: 'Item with children',
url: '/itemWithChildren',
@@ -193,6 +197,13 @@ describe('getActiveItem', () => {
url: '/d/moreSpecificDashboard',
},
];
+ beforeEach(() => {
+ locationUtil.initialize({
+ config: { appSubUrl: '/subUrl' } as GrafanaConfig,
+ getVariablesUrlParams: () => ({}),
+ getTimeRangeForUrl: () => ({ from: 'now-7d', to: 'now' }),
+ });
+ });
it('returns an exact match at the top level', () => {
const mockPathName = '/item';
@@ -202,6 +213,14 @@ describe('getActiveItem', () => {
});
});
+ it('returns an exact match ignoring root subpath', () => {
+ const mockPathName = '/itemAfterSubpath';
+ expect(getActiveItem(mockNavTree, mockPathName)).toEqual({
+ text: 'Item after subpath',
+ url: '/subUrl/itemAfterSubpath',
+ });
+ });
+
it('returns an exact match ignoring query params', () => {
const mockPathName = '/itemWithQueryParam?bar=baz';
expect(getActiveItem(mockNavTree, mockPathName)).toEqual({
diff --git a/public/app/core/components/NavBar/utils.ts b/public/app/core/components/NavBar/utils.ts
index 80c0838c661..e1ad661cce8 100644
--- a/public/app/core/components/NavBar/utils.ts
+++ b/public/app/core/components/NavBar/utils.ts
@@ -1,6 +1,6 @@
import { Location } from 'history';
-import { NavModelItem, NavSection } from '@grafana/data';
+import { locationUtil, NavModelItem, NavSection } from '@grafana/data';
import { reportInteraction } from '@grafana/runtime';
import { getConfig } from 'app/core/config';
import { contextSrv } from 'app/core/services/context_srv';
@@ -120,7 +120,8 @@ export const getActiveItem = (
const dashboardLinkMatch = '/dashboards';
for (const link of navTree) {
- const linkPathname = stripQueryParams(link.url);
+ const linkWithoutParams = stripQueryParams(link.url);
+ const linkPathname = locationUtil.stripBaseFromUrl(linkWithoutParams);
if (linkPathname) {
if (linkPathname === pathname) {
// exact match
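A note on the getActiveItem fix above: nav links may be stored with the configured appSubUrl prefix, so the pathname comparison only works once that prefix is stripped. A minimal sketch of the comparison, assuming appSubUrl is '/subUrl' and using a hypothetical stripBase helper in place of locationUtil.stripBaseFromUrl:

```typescript
// Hypothetical stand-in for locationUtil.stripBaseFromUrl.
const appSubUrl = '/subUrl';

function stripBase(url: string): string {
  return url.startsWith(appSubUrl) ? url.slice(appSubUrl.length) : url;
}

// Before the fix: '/subUrl/itemAfterSubpath' !== '/itemAfterSubpath', so no match.
// After the fix, both sides compare as '/itemAfterSubpath'.
const linkPathname = stripBase('/subUrl/itemAfterSubpath');
console.log(linkPathname === '/itemAfterSubpath'); // true
```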
diff --git a/public/app/core/components/PageNew/SectionNav.tsx b/public/app/core/components/PageNew/SectionNav.tsx
index 3570de4294f..0172722562d 100644
--- a/public/app/core/components/PageNew/SectionNav.tsx
+++ b/public/app/core/components/PageNew/SectionNav.tsx
@@ -19,7 +19,7 @@ export function SectionNav(props: Props) {
{main.icon && }
- {main.img && }
+ {main.img && }
{props.model.main.text}
@@ -75,6 +75,7 @@ const getStyles = (theme: GrafanaTheme2) => {
}),
sectionName: css({
display: 'flex',
+ alignItems: 'center',
gap: theme.spacing(1),
padding: theme.spacing(0.5, 0, 3, 0.25),
fontSize: theme.typography.h4.fontSize,
@@ -83,6 +84,9 @@ const getStyles = (theme: GrafanaTheme2) => {
items: css({
// paddingLeft: '9px',
}),
+ sectionImg: css({
+ height: 48,
+ }),
subSection: css({
padding: theme.spacing(3, 0, 1, 1),
fontWeight: 500,
diff --git a/public/app/core/utils/timeRegions.test.ts b/public/app/core/utils/timeRegions.test.ts
new file mode 100644
index 00000000000..16a20e47d24
--- /dev/null
+++ b/public/app/core/utils/timeRegions.test.ts
@@ -0,0 +1,43 @@
+import { dateTime, TimeRange } from '@grafana/data';
+
+import { calculateTimesWithin, TimeRegionConfig } from './timeRegions';
+
+describe('timeRegions', () => {
+ describe('day of week', () => {
+ it('4 Mondays in January 2021', () => {
+ const cfg: TimeRegionConfig = {
+ fromDayOfWeek: 1,
+ from: '12:00',
+ };
+ const tr: TimeRange = {
+ from: dateTime('2021-01-00', 'YYYY-MM-dd'),
+ to: dateTime('2021-02-00', 'YYYY-MM-dd'),
+ raw: {
+ to: '',
+ from: '',
+ },
+ };
+ const regions = calculateTimesWithin(cfg, tr);
+ expect(regions).toMatchInlineSnapshot(`
+ Array [
+ Object {
+ "from": 1609779600000,
+ "to": 1609779600000,
+ },
+ Object {
+ "from": 1610384400000,
+ "to": 1610384400000,
+ },
+ Object {
+ "from": 1610989200000,
+ "to": 1610989200000,
+ },
+ Object {
+ "from": 1611594000000,
+ "to": 1611594000000,
+ },
+ ]
+ `);
+ });
+ });
+});
diff --git a/public/app/core/utils/timeRegions.ts b/public/app/core/utils/timeRegions.ts
new file mode 100644
index 00000000000..329453cf014
--- /dev/null
+++ b/public/app/core/utils/timeRegions.ts
@@ -0,0 +1,168 @@
+import { AbsoluteTimeRange, dateTime, TimeRange } from '@grafana/data';
+
+export interface TimeRegionConfig {
+ from?: string;
+ fromDayOfWeek?: number; // 1-7
+
+ to?: string;
+ toDayOfWeek?: number; // 1-7
+}
+
+interface ParsedTime {
+ dayOfWeek?: number; // 1-7
+ h?: number; // 0-23
+ m?: number; // 0-59
+ s?: number; // 0-59
+}
+
+export function calculateTimesWithin(cfg: TimeRegionConfig, tRange: TimeRange): AbsoluteTimeRange[] {
+ if (!(cfg.fromDayOfWeek || cfg.from) && !(cfg.toDayOfWeek || cfg.to)) {
+ return [];
+ }
+
+ // Copy the config so we can mutate it locally
+ const timeRegion = { ...cfg };
+
+ if (timeRegion.from && !timeRegion.to) {
+ timeRegion.to = timeRegion.from;
+ }
+
+ if (!timeRegion.from && timeRegion.to) {
+ timeRegion.from = timeRegion.to;
+ }
+
+ const hRange = {
+ from: parseTimeRange(timeRegion.from),
+ to: parseTimeRange(timeRegion.to),
+ };
+
+ if (!timeRegion.fromDayOfWeek && timeRegion.toDayOfWeek) {
+ timeRegion.fromDayOfWeek = timeRegion.toDayOfWeek;
+ }
+
+ if (!timeRegion.toDayOfWeek && timeRegion.fromDayOfWeek) {
+ timeRegion.toDayOfWeek = timeRegion.fromDayOfWeek;
+ }
+
+ if (timeRegion.fromDayOfWeek) {
+ hRange.from.dayOfWeek = Number(timeRegion.fromDayOfWeek);
+ }
+
+ if (timeRegion.toDayOfWeek) {
+ hRange.to.dayOfWeek = Number(timeRegion.toDayOfWeek);
+ }
+
+ if (hRange.from.dayOfWeek && hRange.from.h == null && hRange.from.m == null) {
+ hRange.from.h = 0;
+ hRange.from.m = 0;
+ hRange.from.s = 0;
+ }
+
+ if (hRange.to.dayOfWeek && hRange.to.h == null && hRange.to.m == null) {
+ hRange.to.h = 23;
+ hRange.to.m = 59;
+ hRange.to.s = 59;
+ }
+
+ if (!hRange.from || !hRange.to) {
+ return [];
+ }
+
+ if (hRange.from.h == null) {
+ hRange.from.h = 0;
+ }
+
+ if (hRange.to.h == null) {
+ hRange.to.h = 23;
+ }
+
+ const regions: AbsoluteTimeRange[] = [];
+
+ const fromStart = dateTime(tRange.from);
+ fromStart.set('hour', 0);
+ fromStart.set('minute', 0);
+ fromStart.set('second', 0);
+ fromStart.add(hRange.from.h, 'hours');
+ fromStart.add(hRange.from.m, 'minutes');
+ fromStart.add(hRange.from.s, 'seconds');
+
+ while (fromStart.unix() <= tRange.to.unix()) {
+ while (hRange.from.dayOfWeek && hRange.from.dayOfWeek !== fromStart.isoWeekday()) {
+ fromStart.add(24, 'hours');
+ }
+
+ if (fromStart.unix() > tRange.to.unix()) {
+ break;
+ }
+
+ const fromEnd = dateTime(fromStart);
+
+ if (fromEnd.hour) {
+ if (hRange.from.h <= hRange.to.h) {
+ fromEnd.add(hRange.to.h - hRange.from.h, 'hours');
+ } else if (hRange.from.h > hRange.to.h) {
+ while (fromEnd.hour() !== hRange.to.h) {
+ fromEnd.add(1, 'hours');
+ }
+ } else {
+ fromEnd.add(24 - hRange.from.h, 'hours');
+
+ while (fromEnd.hour() !== hRange.to.h) {
+ fromEnd.add(1, 'hours');
+ }
+ }
+ }
+
+ fromEnd.set('minute', hRange.to.m ?? 0);
+ fromEnd.set('second', hRange.to.s ?? 0);
+
+ while (hRange.to.dayOfWeek && hRange.to.dayOfWeek !== fromEnd.isoWeekday()) {
+ fromEnd.add(24, 'hours');
+ }
+
+ const outsideRange =
+ (fromStart.unix() < tRange.from.unix() && fromEnd.unix() < tRange.from.unix()) ||
+ (fromStart.unix() > tRange.to.unix() && fromEnd.unix() > tRange.to.unix());
+
+ if (!outsideRange) {
+ regions.push({ from: fromStart.valueOf(), to: fromEnd.valueOf() });
+ }
+
+ fromStart.add(24, 'hours');
+ }
+
+ return regions;
+}
+
+function parseTimeRange(str?: string): ParsedTime {
+ const result: ParsedTime = {};
+ if (!str?.length) {
+ return result;
+ }
+
+ const timeRegex = /^([\d]+):?(\d{2})?/;
+ const match = timeRegex.exec(str);
+
+ if (!match) {
+ return result;
+ }
+
+ if (match.length > 1) {
+ result.h = Number(match[1]);
+ result.m = 0;
+
+ if (match.length > 2 && match[2] !== undefined) {
+ result.m = Number(match[2]);
+ }
+
+ if (result.h > 23) {
+ result.h = 23;
+ }
+
+ if (result.m > 59) {
+ result.m = 59;
+ }
+ }
+
+ return result;
+}
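A usage sketch for the new calculateTimesWithin helper, mirroring the test above; dayOfWeek follows ISO numbering (1 = Monday) and each returned region carries epoch-millisecond bounds:

```typescript
import { dateTime, TimeRange } from '@grafana/data';

import { calculateTimesWithin, TimeRegionConfig } from './timeRegions';

// Every ISO weekday 1 (Monday) at 12:00 inside the queried window.
const cfg: TimeRegionConfig = { fromDayOfWeek: 1, from: '12:00' };

const range: TimeRange = {
  from: dateTime('2021-01-01'),
  to: dateTime('2021-02-01'),
  raw: { from: '', to: '' },
};

// Each region is an AbsoluteTimeRange in epoch milliseconds.
for (const region of calculateTimesWithin(cfg, range)) {
  console.log(new Date(region.from).toISOString());
}
```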
diff --git a/public/app/features/alerting/unified/AmRoutes.test.tsx b/public/app/features/alerting/unified/AmRoutes.test.tsx
index 819823f6756..96c08e6f39c 100644
--- a/public/app/features/alerting/unified/AmRoutes.test.tsx
+++ b/public/app/features/alerting/unified/AmRoutes.test.tsx
@@ -21,6 +21,7 @@ import { AccessControlAction } from 'app/types';
import AmRoutes from './AmRoutes';
import { fetchAlertManagerConfig, fetchStatus, updateAlertManagerConfig } from './api/alertmanager';
import { mockDataSource, MockDataSourceSrv, someCloudAlertManagerConfig, someCloudAlertManagerStatus } from './mocks';
+import { defaultGroupBy } from './utils/amroutes';
import { getAllDataSources } from './utils/config';
import { ALERTMANAGER_NAME_QUERY_KEY } from './utils/constants';
import { DataSourceType, GRAFANA_RULES_SOURCE_NAME } from './utils/datasource';
@@ -363,7 +364,7 @@ describe('AmRoutes', () => {
receivers: [{ name: 'default' }],
route: {
continue: false,
- group_by: ['severity', 'namespace'],
+ group_by: defaultGroupBy.concat(['severity', 'namespace']),
receiver: 'default',
routes: [],
mute_time_intervals: [],
diff --git a/public/app/features/alerting/unified/PanelAlertTabContent.test.tsx b/public/app/features/alerting/unified/PanelAlertTabContent.test.tsx
index af80c1289a6..a02dc3f4f0f 100644
--- a/public/app/features/alerting/unified/PanelAlertTabContent.test.tsx
+++ b/public/app/features/alerting/unified/PanelAlertTabContent.test.tsx
@@ -328,6 +328,7 @@ describe('PanelAlertTabContent', () => {
model: {
refId: 'B',
hide: false,
+ expression: 'A',
type: 'classic_conditions',
datasource: {
type: ExpressionDatasourceRef.type,
diff --git a/public/app/features/alerting/unified/components/amroutes/AmRootRouteForm.tsx b/public/app/features/alerting/unified/components/amroutes/AmRootRouteForm.tsx
index 3a98e732783..c0d5d24ce0d 100644
--- a/public/app/features/alerting/unified/components/amroutes/AmRootRouteForm.tsx
+++ b/public/app/features/alerting/unified/components/amroutes/AmRootRouteForm.tsx
@@ -10,6 +10,7 @@ import {
optionalPositiveInteger,
stringToSelectableValue,
stringsToSelectableValues,
+ commonGroupByOptions,
} from '../../utils/amroutes';
import { makeAMLink } from '../../utils/misc';
import { timeOptions } from '../../utils/time';
@@ -86,7 +87,7 @@ export const AmRootRouteForm: FC
= ({
setValue('groupBy', [...field.value, opt]);
}}
onChange={(value) => onChange(mapMultiSelectValueToStrings(value))}
- options={groupByOptions}
+ options={[...commonGroupByOptions, ...groupByOptions]}
/>
)}
control={control}
diff --git a/public/app/features/alerting/unified/components/amroutes/AmRoutesExpandedForm.tsx b/public/app/features/alerting/unified/components/amroutes/AmRoutesExpandedForm.tsx
index d95f984a09c..a58f085ed74 100644
--- a/public/app/features/alerting/unified/components/amroutes/AmRoutesExpandedForm.tsx
+++ b/public/app/features/alerting/unified/components/amroutes/AmRoutesExpandedForm.tsx
@@ -29,6 +29,7 @@ import {
optionalPositiveInteger,
stringToSelectableValue,
stringsToSelectableValues,
+ commonGroupByOptions,
} from '../../utils/amroutes';
import { timeOptions } from '../../utils/time';
@@ -179,7 +180,7 @@ export const AmRoutesExpandedForm: FC = ({ onCancel,
setValue('groupBy', [...field.value, opt]);
}}
onChange={(value) => onChange(mapMultiSelectValueToStrings(value))}
- options={groupByOptions}
+ options={[...commonGroupByOptions, ...groupByOptions]}
/>
)}
control={control}
diff --git a/public/app/features/alerting/unified/components/rule-editor/QueryEditor.tsx b/public/app/features/alerting/unified/components/rule-editor/QueryEditor.tsx
index ee4979f7445..71a38b2ac18 100644
--- a/public/app/features/alerting/unified/components/rule-editor/QueryEditor.tsx
+++ b/public/app/features/alerting/unified/components/rule-editor/QueryEditor.tsx
@@ -35,6 +35,7 @@ interface Props {
interface State {
panelDataByRefId: Record;
}
+
export class QueryEditor extends PureComponent {
private runner: AlertingQueryRunner;
private queries: AlertQuery[];
@@ -100,12 +101,16 @@ export class QueryEditor extends PureComponent {
onNewExpressionQuery = () => {
const { queries } = this;
+ const lastQuery = queries.at(-1);
+ const defaultParams = lastQuery ? [lastQuery.refId] : [];
+
this.onChangeQueries(
addQuery(queries, {
datasourceUid: ExpressionDatasourceUID,
model: expressionDatasource.newQuery({
type: ExpressionQueryType.classic,
- conditions: [defaultCondition],
+ conditions: [{ ...defaultCondition, query: { params: defaultParams } }],
+ expression: lastQuery?.refId,
}),
})
);
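The handler above now seeds a new classic-condition expression with the refId of the last existing query, so it evaluates something by default. The defaulting logic in isolation (refIds are illustrative):

```typescript
interface Query {
  refId: string;
}

// A new expression should reference the most recently added query, if any.
function defaultExpressionParams(queries: Query[]): string[] {
  const lastQuery = queries.at(-1);
  return lastQuery ? [lastQuery.refId] : [];
}

console.log(defaultExpressionParams([{ refId: 'A' }, { refId: 'B' }])); // ['B']
console.log(defaultExpressionParams([])); // []
```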
diff --git a/public/app/features/alerting/unified/components/rules/CloudRules.tsx b/public/app/features/alerting/unified/components/rules/CloudRules.tsx
index 018bf91ea62..982a2fc9346 100644
--- a/public/app/features/alerting/unified/components/rules/CloudRules.tsx
+++ b/public/app/features/alerting/unified/components/rules/CloudRules.tsx
@@ -23,13 +23,14 @@ interface Props {
export const CloudRules: FC = ({ namespaces, expandAll }) => {
const styles = useStyles2(getStyles);
+ const dsConfigs = useUnifiedAlertingSelector((state) => state.dataSources);
const rules = useUnifiedAlertingSelector((state) => state.promRules);
const rulesDataSources = useMemo(getRulesDataSources, []);
const groupsWithNamespaces = useCombinedGroupNamespace(namespaces);
const dataSourcesLoading = useMemo(
- () => rulesDataSources.filter((ds) => rules[ds.name]?.loading),
- [rules, rulesDataSources]
+ () => rulesDataSources.filter((ds) => rules[ds.name]?.loading || dsConfigs[ds.name]?.loading),
+ [rules, dsConfigs, rulesDataSources]
);
const { numberOfPages, onPageChange, page, pageItems } = usePagination(
diff --git a/public/app/features/alerting/unified/components/rules/RulesGroup.test.tsx b/public/app/features/alerting/unified/components/rules/RulesGroup.test.tsx
index 6a0fb892c40..cccbab9b0e3 100644
--- a/public/app/features/alerting/unified/components/rules/RulesGroup.test.tsx
+++ b/public/app/features/alerting/unified/components/rules/RulesGroup.test.tsx
@@ -8,17 +8,26 @@ import { contextSrv } from 'app/core/services/context_srv';
import { configureStore } from 'app/store/configureStore';
import { CombinedRuleGroup, CombinedRuleNamespace } from 'app/types/unified-alerting';
+import { useHasRuler } from '../../hooks/useHasRuler';
import { disableRBAC, mockCombinedRule, mockDataSource } from '../../mocks';
import { RulesGroup } from './RulesGroup';
-const hasRulerMock = jest.fn();
-jest.mock('../../hooks/useHasRuler', () => ({
- useHasRuler: () => hasRulerMock,
-}));
+jest.mock('../../hooks/useHasRuler');
+
+const mocks = {
+ useHasRuler: jest.mocked(useHasRuler),
+};
+
+function mockUseHasRuler(hasRuler: boolean, rulerRulesLoaded: boolean) {
+ mocks.useHasRuler.mockReturnValue({
+ hasRuler: () => hasRuler,
+ rulerRulesLoaded: () => rulerRulesLoaded,
+ });
+}
beforeEach(() => {
- hasRulerMock.mockReset();
+ mocks.useHasRuler.mockReset();
});
const ui = {
@@ -55,6 +64,7 @@ describe('Rules group tests', () => {
it('Should hide delete and edit group buttons', () => {
// Act
+ mockUseHasRuler(true, true);
renderRulesGroup(namespace, group);
// Assert
@@ -83,33 +93,33 @@ describe('Rules group tests', () => {
it('When ruler enabled should display delete and edit group buttons', () => {
// Arrange
- hasRulerMock.mockReturnValue(true);
+ mockUseHasRuler(true, true);
// Act
renderRulesGroup(namespace, group);
// Assert
- expect(hasRulerMock).toHaveBeenCalled();
+ expect(mocks.useHasRuler).toHaveBeenCalled();
expect(ui.deleteGroupButton.get()).toBeInTheDocument();
expect(ui.editGroupButton.get()).toBeInTheDocument();
});
it('When ruler disabled should hide delete and edit group buttons', () => {
// Arrange
- hasRulerMock.mockReturnValue(false);
+ mockUseHasRuler(false, false);
// Act
renderRulesGroup(namespace, group);
// Assert
- expect(hasRulerMock).toHaveBeenCalled();
+ expect(mocks.useHasRuler).toHaveBeenCalled();
expect(ui.deleteGroupButton.query()).not.toBeInTheDocument();
expect(ui.editGroupButton.query()).not.toBeInTheDocument();
});
it('Delete button click should display confirmation modal', async () => {
// Arrange
- hasRulerMock.mockReturnValue(true);
+ mockUseHasRuler(true, true);
// Act
renderRulesGroup(namespace, group);
diff --git a/public/app/features/alerting/unified/components/rules/RulesGroup.tsx b/public/app/features/alerting/unified/components/rules/RulesGroup.tsx
index be9c29a91f8..65f6e9b0284 100644
--- a/public/app/features/alerting/unified/components/rules/RulesGroup.tsx
+++ b/public/app/features/alerting/unified/components/rules/RulesGroup.tsx
@@ -43,13 +43,14 @@ export const RulesGroup: FC = React.memo(({ group, namespace, expandAll }
setIsCollapsed(!expandAll);
}, [expandAll]);
- const hasRuler = useHasRuler();
+ const { hasRuler, rulerRulesLoaded } = useHasRuler();
const rulerRule = group.rules[0]?.rulerRule;
const folderUID = (rulerRule && isGrafanaRulerRule(rulerRule) && rulerRule.grafana_alert.namespace_uid) || undefined;
const { folder } = useFolder(folderUID);
// group "is deleting" if rules source has ruler, but this group has no rules that are in ruler
- const isDeleting = hasRuler(rulesSource) && !group.rules.find((rule) => !!rule.rulerRule);
+ const isDeleting =
+ hasRuler(rulesSource) && rulerRulesLoaded(rulesSource) && !group.rules.find((rule) => !!rule.rulerRule);
const isFederated = isFederatedRuleGroup(group);
const deleteGroup = () => {
diff --git a/public/app/features/alerting/unified/components/rules/RulesTable.tsx b/public/app/features/alerting/unified/components/rules/RulesTable.tsx
index 6bc2dd398bb..ffd44e3f1d2 100644
--- a/public/app/features/alerting/unified/components/rules/RulesTable.tsx
+++ b/public/app/features/alerting/unified/components/rules/RulesTable.tsx
@@ -106,7 +106,7 @@ export const getStyles = (theme: GrafanaTheme2) => ({
});
function useColumns(showSummaryColumn: boolean, showGroupColumn: boolean) {
- const hasRuler = useHasRuler();
+ const { hasRuler, rulerRulesLoaded } = useHasRuler();
return useMemo((): RuleTableColumnProps[] => {
const columns: RuleTableColumnProps[] = [
@@ -118,8 +118,8 @@ function useColumns(showSummaryColumn: boolean, showGroupColumn: boolean) {
const { namespace } = rule;
const { rulesSource } = namespace;
const { promRule, rulerRule } = rule;
- const isDeleting = !!(hasRuler(rulesSource) && promRule && !rulerRule);
- const isCreating = !!(hasRuler(rulesSource) && rulerRule && !promRule);
+ const isDeleting = !!(hasRuler(rulesSource) && rulerRulesLoaded(rulesSource) && promRule && !rulerRule);
+ const isCreating = !!(hasRuler(rulesSource) && rulerRulesLoaded(rulesSource) && rulerRule && !promRule);
return ;
},
size: '165px',
@@ -188,5 +188,5 @@ function useColumns(showSummaryColumn: boolean, showGroupColumn: boolean) {
});
}
return columns;
- }, [hasRuler, showSummaryColumn, showGroupColumn]);
+ }, [hasRuler, rulerRulesLoaded, showSummaryColumn, showGroupColumn]);
}
diff --git a/public/app/features/alerting/unified/hooks/useHasRuler.ts b/public/app/features/alerting/unified/hooks/useHasRuler.ts
index fb4d2e5960c..d9451d4bb86 100644
--- a/public/app/features/alerting/unified/hooks/useHasRuler.ts
+++ b/public/app/features/alerting/unified/hooks/useHasRuler.ts
@@ -2,18 +2,31 @@ import { useCallback } from 'react';
import { RulesSource } from 'app/types/unified-alerting';
-import { GRAFANA_RULES_SOURCE_NAME } from '../utils/datasource';
+import { getRulesSourceName, GRAFANA_RULES_SOURCE_NAME } from '../utils/datasource';
import { useUnifiedAlertingSelector } from './useUnifiedAlertingSelector';
// datasource has ruler if it's grafana managed or if we're able to load rules from it
-export function useHasRuler(): (rulesSource: string | RulesSource) => boolean {
+export function useHasRuler() {
const rulerRules = useUnifiedAlertingSelector((state) => state.rulerRules);
- return useCallback(
+
+ const hasRuler = useCallback(
(rulesSource: string | RulesSource) => {
const rulesSourceName = typeof rulesSource === 'string' ? rulesSource : rulesSource.name;
return rulesSourceName === GRAFANA_RULES_SOURCE_NAME || !!rulerRules[rulesSourceName]?.result;
},
[rulerRules]
);
+
+ const rulerRulesLoaded = useCallback(
+ (rulesSource: RulesSource) => {
+ const rulesSourceName = getRulesSourceName(rulesSource);
+ const result = rulerRules[rulesSourceName]?.result;
+
+ return Boolean(result);
+ },
+ [rulerRules]
+ );
+
+ return { hasRuler, rulerRulesLoaded };
}
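The hook now exposes two predicates so callers can tell "this source has a ruler" apart from "the ruler rules have finished loading". A minimal consumer sketch (the helper name is hypothetical):

```typescript
import { RulesSource } from 'app/types/unified-alerting';

import { useHasRuler } from './useHasRuler';

// Hypothetical helper: only report create/delete transitions once the
// ruler rules for the source have loaded, as RulesGroup and RulesTable now do.
export function useRulerTransitionsVisible(rulesSource: RulesSource): boolean {
  const { hasRuler, rulerRulesLoaded } = useHasRuler();
  return hasRuler(rulesSource) && rulerRulesLoaded(rulesSource);
}
```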
diff --git a/public/app/features/alerting/unified/state/actions.ts b/public/app/features/alerting/unified/state/actions.ts
index 6f32eabb1f3..2109a8ac98b 100644
--- a/public/app/features/alerting/unified/state/actions.ts
+++ b/public/app/features/alerting/unified/state/actions.ts
@@ -255,30 +255,34 @@ export const fetchRulesSourceBuildInfoAction = createAsyncThunk(
const dataSources: AsyncRequestMapSlice = (getState() as StoreState).unifiedAlerting
.dataSources;
const hasLoaded = Boolean(dataSources[rulesSourceName]?.result);
- return !hasLoaded;
+ const hasError = Boolean(dataSources[rulesSourceName]?.error);
+
+ return !(hasLoaded || hasError);
},
}
);
export function fetchAllPromAndRulerRulesAction(force = false): ThunkResult {
return async (dispatch, getStore) => {
- await dispatch(fetchAllPromBuildInfoAction());
+ return Promise.all(
+ getAllRulesSourceNames().map(async (rulesSourceName) => {
+ await dispatch(fetchRulesSourceBuildInfoAction({ rulesSourceName }));
- const { promRules, rulerRules, dataSources } = getStore().unifiedAlerting;
+ const { promRules, rulerRules, dataSources } = getStore().unifiedAlerting;
+ const dataSourceConfig = dataSources[rulesSourceName].result;
- getAllRulesSourceNames().map((rulesSourceName) => {
- const dataSourceConfig = dataSources[rulesSourceName].result;
- if (!dataSourceConfig) {
- return;
- }
+ if (!dataSourceConfig) {
+ return;
+ }
- if (force || !promRules[rulesSourceName]?.loading) {
- dispatch(fetchPromRulesAction({ rulesSourceName }));
- }
- if ((force || !rulerRules[rulesSourceName]?.loading) && dataSourceConfig.rulerConfig) {
- dispatch(fetchRulerRulesAction({ rulesSourceName }));
- }
- });
+ if (force || !promRules[rulesSourceName]?.loading) {
+ dispatch(fetchPromRulesAction({ rulesSourceName }));
+ }
+ if ((force || !rulerRules[rulesSourceName]?.loading) && dataSourceConfig.rulerConfig) {
+ dispatch(fetchRulerRulesAction({ rulesSourceName }));
+ }
+ })
+ );
};
}
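The rewritten thunk awaits build info per rules source inside one Promise.all, so a slow or failing source no longer blocks the others, while the rule fetches themselves stay fire-and-forget. The control flow, reduced to a sketch with hypothetical loaders:

```typescript
// Hypothetical loaders standing in for the real thunks.
declare function loadBuildInfo(sourceName: string): Promise<object | undefined>;
declare function loadRules(sourceName: string): void;

async function fetchAll(sourceNames: string[]): Promise<void> {
  await Promise.all(
    sourceNames.map(async (sourceName) => {
      // Awaited per source; one failure no longer stalls the rest.
      const buildInfo = await loadBuildInfo(sourceName);
      if (!buildInfo) {
        return; // build info missing or errored: skip this source
      }
      // Dispatched without awaiting, matching the thunk above.
      loadRules(sourceName);
    })
  );
}
```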
diff --git a/public/app/features/alerting/unified/utils/amroutes.ts b/public/app/features/alerting/unified/utils/amroutes.ts
index 18f804aae39..57cde530c32 100644
--- a/public/app/features/alerting/unified/utils/amroutes.ts
+++ b/public/app/features/alerting/unified/utils/amroutes.ts
@@ -59,10 +59,20 @@ export const emptyArrayFieldMatcher: MatcherFieldValue = {
operator: MatcherOperator.equal,
};
+// Default route group_by labels for newly created routes.
+export const defaultGroupBy = ['grafana_folder', 'alertname'];
+
+// Common route group_by options for multiselect drop-down
+export const commonGroupByOptions = [
+ { label: 'grafana_folder', value: 'grafana_folder' },
+ { label: 'alertname', value: 'alertname' },
+ { label: 'Disable (...)', value: '...' },
+];
+
export const emptyRoute: FormAmRoute = {
id: '',
overrideGrouping: false,
- groupBy: [],
+ groupBy: defaultGroupBy,
object_matchers: [],
routes: [],
continue: false,
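With defaultGroupBy seeding emptyRoute and commonGroupByOptions prepended in the forms above, the MultiSelect receives one flat option list; both arrays have to be spread. For example (the discovered label options are illustrative):

```typescript
import { SelectableValue } from '@grafana/data';

import { commonGroupByOptions } from './amroutes';

// Illustrative options derived from existing alert labels.
const groupByOptions: Array<SelectableValue<string>> = [
  { label: 'severity', value: 'severity' },
  { label: 'namespace', value: 'namespace' },
];

// Flat list: grafana_folder, alertname, Disable (...), severity, namespace
const options = [...commonGroupByOptions, ...groupByOptions];
console.log(options.map((opt) => opt.label));
```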
diff --git a/public/app/features/alerting/unified/utils/rule-form.ts b/public/app/features/alerting/unified/utils/rule-form.ts
index 7f7a1aeb77c..bd03002c23c 100644
--- a/public/app/features/alerting/unified/utils/rule-form.ts
+++ b/public/app/features/alerting/unified/utils/rule-form.ts
@@ -220,6 +220,7 @@ const getDefaultExpression = (refId: string): AlertQuery => {
},
},
],
+ expression: 'A',
};
return {
diff --git a/public/app/features/annotations/standardAnnotationSupport.ts b/public/app/features/annotations/standardAnnotationSupport.ts
index 8f4904abc7a..01d88a803f3 100644
--- a/public/app/features/annotations/standardAnnotationSupport.ts
+++ b/public/app/features/annotations/standardAnnotationSupport.ts
@@ -236,6 +236,7 @@ const legacyRunner = [
'loki',
'elasticsearch',
'grafana-opensearch-datasource', // external
+ 'grafana-splunk-datasource', // external
];
/**
diff --git a/public/app/features/api-keys/ApiKeysPage.tsx b/public/app/features/api-keys/ApiKeysPage.tsx
index 4cb20857620..5e738b09316 100644
--- a/public/app/features/api-keys/ApiKeysPage.tsx
+++ b/public/app/features/api-keys/ApiKeysPage.tsx
@@ -177,13 +177,8 @@ export class ApiKeysPageUnconnected extends PureComponent {
const showTable = apiKeysCount > 0;
return (
<>
- {/* TODO: remove feature flag check before GA */}
- {config.featureToggles.serviceAccounts && !apiKeysMigrated && (
-
- )}
- {config.featureToggles.serviceAccounts && apiKeysMigrated && (
-
- )}
+ {!apiKeysMigrated && }
+ {apiKeysMigrated && }
{showCTA ? (
= ({ apiKeys, timeZone, onDelete, onMigrate
- {config.featureToggles.serviceAccounts && (
- onMigrate(key)}>
- Migrate
-
- )}
+ onMigrate(key)}>
+ Migrate
+
{
export function getApiKeysMigrationStatus(): ThunkResult {
return async (dispatch) => {
- // TODO: remove when service account enabled by default (or use another way to detect if it's enabled)
- if (config.featureToggles.serviceAccounts) {
- const result = await getBackendSrv().get('/api/serviceaccounts/migrationstatus');
- dispatch(apiKeysMigrationStatusLoaded(!!result?.migrated));
- }
+ const result = await getBackendSrv().get('/api/serviceaccounts/migrationstatus');
+ dispatch(apiKeysMigrationStatusLoaded(!!result?.migrated));
};
}
diff --git a/public/app/features/canvas/runtime/ables.tsx b/public/app/features/canvas/runtime/ables.tsx
index 621e3afba8e..ae19c84f87a 100644
--- a/public/app/features/canvas/runtime/ables.tsx
+++ b/public/app/features/canvas/runtime/ables.tsx
@@ -44,11 +44,8 @@ export const constraintViewable = (scene: Scene) => ({
const rect = moveable.getRect();
const targetElement = scene.findElementByTarget(moveable.state.target);
- // If target is currently in motion or selection is more than 1 element don't display constraint visualizations
- if (
- targetElement?.isMoving ||
- (scene.selecto?.getSelectedTargets() && scene.selecto?.getSelectedTargets().length > 1)
- ) {
+ // If the selection contains more than one element, don't display constraint visualizations
+ if (scene.selecto?.getSelectedTargets() && scene.selecto?.getSelectedTargets().length > 1) {
return;
}
diff --git a/public/app/features/canvas/runtime/element.tsx b/public/app/features/canvas/runtime/element.tsx
index cbc912264f2..3f9b2d95ae3 100644
--- a/public/app/features/canvas/runtime/element.tsx
+++ b/public/app/features/canvas/runtime/element.tsx
@@ -26,9 +26,6 @@ export class ElementState implements LayerElement {
sizeStyle: CSSProperties = {};
dataStyle: CSSProperties = {};
- // Determine whether or not element is in motion or not (via moveable)
- isMoving = false;
-
// Temp stored constraint for visualization purposes (switch to top / left constraint to simplify some functionality)
tempConstraint: Constraint | undefined;
diff --git a/public/app/features/canvas/runtime/frame.tsx b/public/app/features/canvas/runtime/frame.tsx
index cd1e1364d58..d53fd92d682 100644
--- a/public/app/features/canvas/runtime/frame.tsx
+++ b/public/app/features/canvas/runtime/frame.tsx
@@ -74,6 +74,19 @@ export class FrameState extends ElementState {
this.reinitializeMoveable();
}
+ // Used by the tree view to reorder child elements
+ reorderTree(src: ElementState, dest: ElementState, firstPosition = false) {
+ const result = Array.from(this.elements);
+ const srcIndex = this.elements.indexOf(src);
+ const destIndex = firstPosition ? this.elements.length - 1 : this.elements.indexOf(dest);
+
+ const [removed] = result.splice(srcIndex, 1);
+ result.splice(destIndex, 0, removed);
+ this.elements = result;
+
+ this.reinitializeMoveable();
+ }
+
doMove(child: ElementState, action: LayerActionID) {
const vals = this.elements.filter((v) => v !== child);
if (action === LayerActionID.MoveBottom) {
@@ -176,7 +189,7 @@ export class FrameState extends ElementState {
render() {
return (
-
+
{this.elements.map((v) => v.render())}
);
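
reorderTree is a plain splice-based move; the same array operation in isolation (plain strings instead of ElementState, for illustration only):

    function reorder<T>(arr: T[], srcIndex: number, destIndex: number): T[] {
      const result = Array.from(arr);
      const [removed] = result.splice(srcIndex, 1); // take the element out
      result.splice(destIndex, 0, removed); // re-insert it at the target slot
      return result;
    }

    reorder(['a', 'b', 'c', 'd'], 0, 2); // -> ['b', 'c', 'a', 'd']
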
diff --git a/public/app/features/canvas/runtime/scene.tsx b/public/app/features/canvas/runtime/scene.tsx
index a167a3992eb..3c4daee26d6 100644
--- a/public/app/features/canvas/runtime/scene.tsx
+++ b/public/app/features/canvas/runtime/scene.tsx
@@ -60,6 +60,7 @@ export class Scene {
currentLayer?: FrameState;
isEditingEnabled?: boolean;
skipNextSelectionBroadcast = false;
+ ignoreDataUpdate = false;
isPanelEditing = locationService.getSearchObject().editPanel !== undefined;
@@ -327,10 +328,10 @@ export class Scene {
this.selecto!.clickTarget(event.inputEvent, event.inputTarget);
})
.on('dragStart', (event) => {
- const targetedElement = this.findElementByTarget(event.target);
- if (targetedElement) {
- targetedElement.isMoving = true;
- }
+ this.ignoreDataUpdate = true;
+ })
+ .on('dragGroupStart', (event) => {
+ this.ignoreDataUpdate = true;
})
.on('drag', (event) => {
const targetedElement = this.findElementByTarget(event.target);
@@ -342,14 +343,25 @@ export class Scene {
targetedElement!.applyDrag(event);
});
})
+ .on('dragGroupEnd', (e) => {
+ e.events.forEach((event) => {
+ const targetedElement = this.findElementByTarget(event.target);
+ if (targetedElement) {
+ targetedElement.setPlacementFromConstraint();
+ }
+ });
+
+ this.moved.next(Date.now());
+ this.ignoreDataUpdate = false;
+ })
.on('dragEnd', (event) => {
const targetedElement = this.findElementByTarget(event.target);
if (targetedElement) {
targetedElement.setPlacementFromConstraint();
- targetedElement.isMoving = false;
}
this.moved.next(Date.now());
+ this.ignoreDataUpdate = false;
})
.on('resizeStart', (event) => {
const targetedElement = this.findElementByTarget(event.target);
@@ -417,6 +429,65 @@ export class Scene {
});
};
+ reorderElements = (src: ElementState, dest: ElementState, dragToGap: boolean, destPosition: number) => {
+ switch (dragToGap) {
+ case true:
+ switch (destPosition) {
+ case -1:
+ // top of the tree
+ if (src.parent instanceof FrameState) {
+ // move outside the frame
+ if (dest.parent) {
+ this.updateElements(src, dest.parent, dest.parent.elements.length);
+ src.updateData(dest.parent.scene.context);
+ }
+ } else {
+ dest.parent?.reorderTree(src, dest, true);
+ }
+ break;
+ default:
+ if (dest.parent) {
+ this.updateElements(src, dest.parent, dest.parent.elements.indexOf(dest));
+ src.updateData(dest.parent.scene.context);
+ }
+ break;
+ }
+ break;
+ case false:
+ if (dest instanceof FrameState) {
+ if (src.parent === dest) {
+ // same frame parent
+ src.parent?.reorderTree(src, dest, true);
+ } else {
+ this.updateElements(src, dest);
+ src.updateData(dest.scene.context);
+ }
+ } else if (src.parent === dest.parent) {
+ src.parent?.reorderTree(src, dest);
+ } else {
+ if (dest.parent) {
+ this.updateElements(src, dest.parent);
+ src.updateData(dest.parent.scene.context);
+ }
+ }
+ break;
+ }
+ };
+
+ private updateElements = (src: ElementState, dest: FrameState | RootElement, idx: number | null = null) => {
+ src.parent?.doAction(LayerActionID.Delete, src);
+ src.parent = dest;
+
+ const elementContainer = src.div?.getBoundingClientRect();
+ src.setPlacementFromConstraint(elementContainer, dest.div?.getBoundingClientRect());
+
+ const destIndex = idx ?? dest.elements.length - 1;
+ dest.elements.splice(destIndex, 0, src);
+ dest.scene.save();
+
+ dest.reinitializeMoveable();
+ };
+
render() {
const canShowContextMenu = this.isPanelEditing || (!this.isPanelEditing && this.isEditingEnabled);
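
The branching in reorderElements condenses to a small decision table (a summary of the code above, not new behavior):

    // dragToGap | destPosition  | action
    // ----------+---------------+-----------------------------------------------
    // true      | -1 (tree top) | leave the frame if src is framed, else reorder
    //           |               | src to the first tree position in the parent
    // true      | otherwise     | move src into dest.parent at dest's index
    // false     | dest is frame | reorder inside it if already the parent,
    //           |               | else move src into the frame
    // false     | same parent   | reorder src next to dest within the parent
    // false     | otherwise     | move src into dest.parent
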
diff --git a/public/app/features/dashboard/components/ShareModal/SharePublicDashboard.test.tsx b/public/app/features/dashboard/components/ShareModal/SharePublicDashboard.test.tsx
index 2fc2e330276..3288b4fa7ee 100644
--- a/public/app/features/dashboard/components/ShareModal/SharePublicDashboard.test.tsx
+++ b/public/app/features/dashboard/components/ShareModal/SharePublicDashboard.test.tsx
@@ -96,10 +96,28 @@ describe('SharePublic', () => {
await screen.findByText('Welcome to Grafana public dashboards alpha!');
});
- // test when checkboxes show up
- // test checkboxes hidden
- // test url hidden
- // test url shows up
- //
+ it('renders default time in inputs', async () => {
+ config.featureToggles.publicDashboards = true;
+ const mockDashboard = new DashboardModel({
+ uid: 'mockDashboardUid',
+ });
+ const mockPanel = new PanelModel({
+ id: 'mockPanelId',
+ });
+
+ expect(mockDashboard.time).toEqual({ from: 'now-6h', to: 'now' });
+ //@ts-ignore
+ mockDashboard.originalTime = { from: 'test-from', to: 'test-to' };
+
+ render(
{}} />);
+
+ await waitFor(() => screen.getByText('Link'));
+ fireEvent.click(screen.getByText('Public Dashboard'));
+
+ await screen.findByText('Welcome to Grafana public dashboards alpha!');
+ expect(screen.getByDisplayValue('test-from')).toBeInTheDocument();
+ expect(screen.getByDisplayValue('test-to')).toBeInTheDocument();
+ });
+
// test checking if current version of dashboard in state is persisted to db
});
diff --git a/public/app/features/dashboard/components/ShareModal/SharePublicDashboard.tsx b/public/app/features/dashboard/components/ShareModal/SharePublicDashboard.tsx
index 6c42a952944..8686fe66950 100644
--- a/public/app/features/dashboard/components/ShareModal/SharePublicDashboard.tsx
+++ b/public/app/features/dashboard/components/ShareModal/SharePublicDashboard.tsx
@@ -2,7 +2,18 @@ import React, { useCallback, useEffect, useState } from 'react';
import { AppEvents } from '@grafana/data';
import { reportInteraction } from '@grafana/runtime/src';
-import { Alert, Button, Checkbox, ClipboardButton, Field, FieldSet, Input, LinkButton, Switch } from '@grafana/ui';
+import {
+ Alert,
+ Button,
+ Checkbox,
+ ClipboardButton,
+ Field,
+ FieldSet,
+ Input,
+ Label,
+ LinkButton,
+ Switch,
+} from '@grafana/ui';
import { notifyApp } from 'app/core/actions';
import { createErrorNotification } from 'app/core/copy/appNotification';
import { appEvents } from 'app/core/core';
@@ -160,18 +171,19 @@ export const SharePublicDashboard = (props: Props) => {
Public Dashboard Configuration
- Time Range
-
+
+ Time Range
+
From:
}
/>
To:
diff --git a/public/app/features/dashboard/utils/getPanelMenu.ts b/public/app/features/dashboard/utils/getPanelMenu.ts
index f5921d734b3..ef99116806e 100644
--- a/public/app/features/dashboard/utils/getPanelMenu.ts
+++ b/public/app/features/dashboard/utils/getPanelMenu.ts
@@ -1,3 +1,5 @@
+import { t } from '@lingui/macro';
+
import { PanelMenuItem } from '@grafana/data';
import { AngularComponent, getDataSourceSrv, locationService } from '@grafana/runtime';
import { PanelCtrl } from 'app/angular/panel/panel_ctrl';
@@ -90,8 +92,12 @@ export function getPanelMenu(
const menu: PanelMenuItem[] = [];
if (!panel.isEditing) {
+ const viewTextTranslation = t({
+ id: 'panel.header-menu.view',
+ message: `View`,
+ });
menu.push({
- text: 'View',
+ text: viewTextTranslation,
iconClassName: 'eye',
onClick: onViewPanel,
shortcut: 'v',
@@ -107,8 +113,13 @@ export function getPanelMenu(
});
}
+ const shareTextTranslation = t({
+ id: 'panel.header-menu.share',
+ message: `Share`,
+ });
+
menu.push({
- text: 'Share',
+ text: shareTextTranslation,
iconClassName: 'share-alt',
onClick: onSharePanel,
shortcut: 'p s',
@@ -127,8 +138,13 @@ export function getPanelMenu(
// Only show these inspect actions for data plugins
if (panel.plugin && !panel.plugin.meta.skipDataQuery) {
+ const dataTextTranslation = t({
+ id: 'panel.header-menu.inspect-data',
+ message: `Data`,
+ });
+
inspectMenu.push({
- text: 'Data',
+ text: dataTextTranslation,
onClick: (e: React.MouseEvent
) => onInspectPanel('data'),
});
@@ -140,14 +156,23 @@ export function getPanelMenu(
}
}
+ const jsonTextTranslation = t({
+ id: 'panel.header-menu.inspect-json',
+ message: `Panel JSON`,
+ });
+
inspectMenu.push({
- text: 'Panel JSON',
+ text: jsonTextTranslation,
onClick: (e: React.MouseEvent) => onInspectPanel('json'),
});
+ const inspectTextTranslation = t({
+ id: 'panel.header-menu.inspect',
+ message: `Inspect`,
+ });
menu.push({
type: 'submenu',
- text: 'Inspect',
+ text: inspectTextTranslation,
iconClassName: 'info-circle',
onClick: (e: React.MouseEvent) => onInspectPanel(),
shortcut: 'i',
@@ -205,9 +230,13 @@ export function getPanelMenu(
}
if (!panel.isEditing && subMenu.length) {
+ const moreTextTranslation = t({
+ id: 'panel.header-menu.more',
+ message: `More...`,
+ });
menu.push({
type: 'submenu',
- text: 'More...',
+ text: moreTextTranslation,
iconClassName: 'cube',
subMenu,
onClick: onMore,
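
All of these changes follow the same @lingui/macro pattern: t({ id, message }) resolves the translated string for the active locale and falls back to message. A new entry would look identical (a sketch; the id, icon, and handler here are hypothetical, not part of this diff):

    const duplicateTextTranslation = t({
      id: 'panel.header-menu.duplicate', // hypothetical id
      message: `Duplicate`,
    });
    menu.push({
      text: duplicateTextTranslation,
      iconClassName: 'copy',
      onClick: onDuplicatePanel, // hypothetical handler
    });
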
diff --git a/public/app/features/explore/Explore.tsx b/public/app/features/explore/Explore.tsx
index 2987f65b1cf..4d0a513972b 100644
--- a/public/app/features/explore/Explore.tsx
+++ b/public/app/features/explore/Explore.tsx
@@ -280,12 +280,15 @@ export class Explore extends React.PureComponent {
}
renderNodeGraphPanel() {
- const { exploreId, showTrace, queryResponse } = this.props;
+ const { exploreId, showTrace, queryResponse, datasourceInstance } = this.props;
+ const datasourceType = datasourceInstance ? datasourceInstance.type : 'unknown';
+
return (
);
}
diff --git a/public/app/features/explore/ExplorePaneContainer.tsx b/public/app/features/explore/ExplorePaneContainer.tsx
index 83dd8bb028b..0e7dc124a1f 100644
--- a/public/app/features/explore/ExplorePaneContainer.tsx
+++ b/public/app/features/explore/ExplorePaneContainer.tsx
@@ -1,9 +1,11 @@
+import { css, cx } from '@emotion/css';
import memoizeOne from 'memoize-one';
import React from 'react';
import { connect, ConnectedProps } from 'react-redux';
-import { DataQuery, ExploreUrlState, EventBusExtended, EventBusSrv } from '@grafana/data';
+import { DataQuery, ExploreUrlState, EventBusExtended, EventBusSrv, GrafanaTheme2 } from '@grafana/data';
import { selectors } from '@grafana/e2e-selectors';
+import { Themeable2, withTheme2 } from '@grafana/ui';
import store from 'app/core/store';
import {
DEFAULT_RANGE,
@@ -22,7 +24,23 @@ import Explore from './Explore';
import { initializeExplore, refreshExplore } from './state/explorePane';
import { lastSavedUrl, cleanupPaneAction } from './state/main';
-interface OwnProps {
+const getStyles = (theme: GrafanaTheme2) => {
+ return {
+ explore: css`
+ display: flex;
+ flex: 1 1 auto;
+ flex-direction: column;
+ & + & {
+ border-left: 1px dotted ${theme.colors.border.medium};
+ }
+ `,
+ exploreSplit: css`
+ width: 50%;
+ `,
+ };
+};
+
+interface OwnProps extends Themeable2 {
exploreId: ExploreId;
urlQuery: string;
split: boolean;
@@ -87,10 +105,12 @@ class ExplorePaneContainerUnconnected extends React.PureComponent {
};
render() {
- const exploreClass = this.props.split ? 'explore explore-split' : 'explore';
+ const { theme, split, exploreId, initialized } = this.props;
+ const styles = getStyles(theme);
+ const exploreClass = cx(styles.explore, split && styles.exploreSplit);
return (
- {this.props.initialized && }
+ {initialized && }
);
}
@@ -128,4 +148,4 @@ const mapDispatchToProps = {
const connector = connect(mapStateToProps, mapDispatchToProps);
-export const ExplorePaneContainer = connector(ExplorePaneContainerUnconnected);
+export const ExplorePaneContainer = withTheme2(connector(ExplorePaneContainerUnconnected));
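
The `& + &` rule is the emotion idiom for "an element with this class immediately following a sibling with the same class": it compiles to `.<hash> + .<hash>`, so the dotted divider is drawn only between two panes in split view and never on the left edge of the first pane. A minimal sketch:

    import { css } from '@emotion/css';

    // Compiles to `.css-xxx + .css-xxx`, matching only the second sibling.
    const explore = css`
      & + & {
        border-left: 1px dotted gray;
      }
    `;
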
diff --git a/public/app/features/explore/NodeGraphContainer.test.tsx b/public/app/features/explore/NodeGraphContainer.test.tsx
index f05619fe848..e9bd95d2393 100644
--- a/public/app/features/explore/NodeGraphContainer.test.tsx
+++ b/public/app/features/explore/NodeGraphContainer.test.tsx
@@ -16,6 +16,7 @@ describe('NodeGraphContainer', () => {
range={getDefaultTimeRange()}
splitOpen={(() => {}) as any}
withTraceView={true}
+ datasourceType={''}
/>
);
@@ -30,6 +31,7 @@ describe('NodeGraphContainer', () => {
exploreId={ExploreId.left}
range={getDefaultTimeRange()}
splitOpen={(() => {}) as any}
+ datasourceType={''}
/>
);
diff --git a/public/app/features/explore/NodeGraphContainer.tsx b/public/app/features/explore/NodeGraphContainer.tsx
index b815aa522fd..40e3f588942 100644
--- a/public/app/features/explore/NodeGraphContainer.tsx
+++ b/public/app/features/explore/NodeGraphContainer.tsx
@@ -4,6 +4,7 @@ import { connect, ConnectedProps } from 'react-redux';
import { useToggle, useWindowSize } from 'react-use';
import { applyFieldOverrides, DataFrame, GrafanaTheme2 } from '@grafana/data';
+import { reportInteraction } from '@grafana/runtime';
import { Badge, Collapse, useStyles2, useTheme2 } from '@grafana/ui';
import { NodeGraph } from '../../plugins/panel/nodeGraph';
@@ -27,12 +28,13 @@ interface OwnProps {
exploreId: ExploreId;
// When showing the node graph together with trace view we do some changes so it works better.
withTraceView?: boolean;
+ datasourceType: string;
}
type Props = OwnProps & ConnectedProps;
export function UnconnectedNodeGraphContainer(props: Props) {
- const { dataFrames, range, splitOpen, withTraceView } = props;
+ const { dataFrames, range, splitOpen, withTraceView, datasourceType } = props;
const getLinks = useLinks(range, splitOpen);
const theme = useTheme2();
const styles = useStyles2(getStyles);
@@ -53,6 +55,13 @@ export function UnconnectedNodeGraphContainer(props: Props) {
const { nodes } = useCategorizeFrames(frames);
const [open, toggleOpen] = useToggle(false);
+ const toggled = () => {
+ toggleOpen();
+ reportInteraction('grafana_traces_node_graph_panel_clicked', {
+ datasourceType: datasourceType,
+ expanded: !open,
+ });
+ };
// Calculate node graph height based on window and top position, with some padding
const { height: windowHeight } = useWindowSize();
@@ -82,7 +91,7 @@ export function UnconnectedNodeGraphContainer(props: Props) {
collapsible={withTraceView}
// We allow collapsing this only when it is shown together with trace view.
isOpen={withTraceView ? open : true}
- onToggle={withTraceView ? () => toggleOpen() : undefined}
+ onToggle={withTraceView ? () => toggled() : undefined}
>
diff --git a/public/app/features/explore/TraceView/TraceView.tsx b/public/app/features/explore/TraceView/TraceView.tsx
--- a/public/app/features/explore/TraceView/TraceView.tsx
+++ b/public/app/features/explore/TraceView/TraceView.tsx
 setSlim(!slim), [slim]);
const timeZone = useSelector((state: StoreState) => getTimeZone(state.user));
+ const datasourceType = datasource ? datasource?.type : 'unknown';
return (
<>
@@ -162,6 +163,7 @@ export function TraceView(props: Props) {
scrollToFirstVisibleSpan={noop}
findMatchesIDs={spanFindMatches}
trace={traceProp}
+ datasourceType={datasourceType}
spanBarOptions={spanBarOptions?.spanBar}
traceTimeline={traceTimeline}
updateNextViewRangeTime={updateNextViewRangeTime}
diff --git a/public/app/features/explore/TraceView/TraceViewContainer.test.tsx b/public/app/features/explore/TraceView/TraceViewContainer.test.tsx
index be29de1c3f5..8766896cb1c 100644
--- a/public/app/features/explore/TraceView/TraceViewContainer.test.tsx
+++ b/public/app/features/explore/TraceView/TraceViewContainer.test.tsx
@@ -11,6 +11,13 @@ import { configureStore } from '../../../store/configureStore';
import { frameOld } from './TraceView.test';
import { TraceViewContainer } from './TraceViewContainer';
+jest.mock('@grafana/runtime', () => {
+ return {
+ ...jest.requireActual('@grafana/runtime'),
+ reportInteraction: jest.fn(),
+ };
+});
+
function renderTraceViewContainer(frames = [frameOld]) {
const store = configureStore();
const mockPanelData = {
diff --git a/public/app/features/explore/Wrapper.tsx b/public/app/features/explore/Wrapper.tsx
index 6afa7dd29d1..c824a3a558c 100644
--- a/public/app/features/explore/Wrapper.tsx
+++ b/public/app/features/explore/Wrapper.tsx
@@ -1,3 +1,4 @@
+import { css } from '@emotion/css';
import React, { PureComponent } from 'react';
import { connect, ConnectedProps } from 'react-redux';
@@ -14,6 +15,18 @@ import { ExploreActions } from './ExploreActions';
import { ExplorePaneContainer } from './ExplorePaneContainer';
import { lastSavedUrl, resetExploreAction, richHistoryUpdatedAction } from './state/main';
+const styles = {
+ pageScrollbarWrapper: css`
+ width: 100%;
+ flex-grow: 1;
+ min-height: 0;
+ `,
+ exploreWrapper: css`
+ display: flex;
+ height: 100%;
+ `,
+};
+
interface RouteProps extends GrafanaRouteComponentProps<{}, ExploreQueryParams> {}
interface OwnProps {}
@@ -71,9 +84,9 @@ class WrapperUnconnected extends PureComponent
{
const hasSplit = Boolean(left) && Boolean(right);
return (
-
+
-
+
diff --git a/public/app/features/expressions/ExpressionDatasource.ts b/public/app/features/expressions/ExpressionDatasource.ts
index 5376e396377..7c8de0874d9 100644
--- a/public/app/features/expressions/ExpressionDatasource.ts
+++ b/public/app/features/expressions/ExpressionDatasource.ts
@@ -53,9 +53,9 @@ export class ExpressionDatasourceApi extends DataSourceWithBackend
): ExpressionQuery {
return {
refId: '--', // Replaced with query
- type: query?.type ?? ExpressionQueryType.math,
datasource: ExpressionDatasourceRef,
- conditions: query?.conditions ?? undefined,
+ type: query?.type ?? ExpressionQueryType.math,
+ ...query,
};
}
}
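
The reordering plus the trailing ...query matters because object spread is last-writer-wins: caller-supplied fields now override the defaults instead of being cherry-picked one by one. In isolation:

    const defaults = { refId: '--', type: 'math' };
    const query = { type: 'reduce', expression: 'A' };

    // Later spreads win, so the caller's type and any extra fields survive:
    const merged = { ...defaults, ...query };
    // -> { refId: '--', type: 'reduce', expression: 'A' }
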
diff --git a/public/app/features/expressions/ExpressionQueryEditor.tsx b/public/app/features/expressions/ExpressionQueryEditor.tsx
index 618bdf93787..66c21d60ccd 100644
--- a/public/app/features/expressions/ExpressionQueryEditor.tsx
+++ b/public/app/features/expressions/ExpressionQueryEditor.tsx
@@ -1,4 +1,4 @@
-import React, { PureComponent } from 'react';
+import React, { useCallback, useEffect, useRef } from 'react';
import { DataSourceApi, QueryEditorProps, SelectableValue } from '@grafana/data';
import { InlineField, Select } from '@grafana/ui';
@@ -13,15 +13,61 @@ import { getDefaults } from './utils/expressionTypes';
type Props = QueryEditorProps, ExpressionQuery>;
const labelWidth = 14;
-export class ExpressionQueryEditor extends PureComponent {
- onSelectExpressionType = (item: SelectableValue) => {
- const { query, onChange } = this.props;
- onChange(getDefaults({ ...query, type: item.value! }));
- };
+type NonClassicExpressionType = Exclude<ExpressionQueryType, ExpressionQueryType.classic>;
+type ExpressionTypeConfigStorage = Partial<Record<NonClassicExpressionType, string>>;
+
+function useExpressionsCache() {
+ const expressionCache = useRef<ExpressionTypeConfigStorage>({});
+
+ const getCachedExpression = useCallback((queryType: ExpressionQueryType) => {
+ switch (queryType) {
+ case ExpressionQueryType.math:
+ case ExpressionQueryType.reduce:
+ case ExpressionQueryType.resample:
+ return expressionCache.current[queryType];
+ case ExpressionQueryType.classic:
+ return undefined;
+ }
+ }, []);
+
+ const setCachedExpression = useCallback((queryType: ExpressionQueryType, value: string | undefined) => {
+ switch (queryType) {
+ case ExpressionQueryType.math:
+ expressionCache.current.math = value;
+ break;
- renderExpressionType() {
- const { onChange, onRunQuery, query, queries } = this.props;
+ // We want to use the same value for Reduce and Resample
+ case ExpressionQueryType.reduce:
+ case ExpressionQueryType.resample:
+ expressionCache.current.reduce = value;
+ expressionCache.current.resample = value;
+ break;
+ }
+ }, []);
+
+ return { getCachedExpression, setCachedExpression };
+}
+
+export function ExpressionQueryEditor(props: Props) {
+ const { query, queries, onRunQuery, onChange } = props;
+ const { getCachedExpression, setCachedExpression } = useExpressionsCache();
+
+ useEffect(() => {
+ setCachedExpression(query.type, query.expression);
+ }, [query.expression, query.type, setCachedExpression]);
+
+ const onSelectExpressionType = useCallback(
+ (item: SelectableValue) => {
+ const cachedExpression = getCachedExpression(item.value!);
+ const defaults = getDefaults({ ...query, type: item.value! });
+
+ onChange({ ...defaults, expression: cachedExpression ?? defaults.expression });
+ },
+ [query, onChange, getCachedExpression]
+ );
+
+ const renderExpressionType = () => {
const refIds = queries!.filter((q) => query.refId !== q.refId).map((q) => ({ value: q.refId, label: q.refId }));
switch (query.type) {
@@ -37,19 +83,16 @@ export class ExpressionQueryEditor extends PureComponent {
case ExpressionQueryType.classic:
return ;
}
- }
-
- render() {
- const { query } = this.props;
- const selected = gelTypes.find((o) => o.value === query.type);
-
- return (
-
-
-
-
- {this.renderExpressionType()}
-
- );
- }
+ };
+
+ const selected = gelTypes.find((o) => o.value === query.type);
+
+ return (
+
+
+
+
+ {renderExpressionType()}
+
+ );
}
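
The cache deliberately uses useRef rather than useState: writes to expressionCache.current persist the last expression per query type without scheduling a re-render, which is what is wanted when the user flips between expression types. The bare pattern (a minimal sketch, not the component above):

    import { useCallback, useRef } from 'react';

    function useLastValue<T>() {
      const last = useRef<T | undefined>(undefined);
      const get = useCallback(() => last.current, []);
      const set = useCallback((v: T) => {
        last.current = v; // mutation only; no re-render is triggered
      }, []);
      return { get, set };
    }
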
diff --git a/public/app/features/expressions/components/ClassicConditions.tsx b/public/app/features/expressions/components/ClassicConditions.tsx
index 6496702d88f..3b7c8805ef4 100644
--- a/public/app/features/expressions/components/ClassicConditions.tsx
+++ b/public/app/features/expressions/components/ClassicConditions.tsx
@@ -26,9 +26,12 @@ export const ClassicConditions: FC = ({ onChange, query, refIds }) => {
const onAddCondition = () => {
if (query.conditions) {
+ const lastParams = query.conditions.at(-1)?.query?.params ?? [];
+ const newCondition: ClassicCondition = { ...defaultCondition, query: { params: lastParams } };
+
onChange({
...query,
- conditions: query.conditions.length > 0 ? [...query.conditions, defaultCondition] : [defaultCondition],
+ conditions: query.conditions.length > 0 ? [...query.conditions, newCondition] : [newCondition],
});
}
};
diff --git a/public/app/features/expressions/utils/expressionTypes.ts b/public/app/features/expressions/utils/expressionTypes.ts
index d3e3665963b..27c8b2c5941 100644
--- a/public/app/features/expressions/utils/expressionTypes.ts
+++ b/public/app/features/expressions/utils/expressionTypes.ts
@@ -9,7 +9,7 @@ export const getDefaults = (query: ExpressionQuery) => {
if (!query.reducer) {
query.reducer = ReducerID.mean;
}
- query.expression = undefined;
+
break;
case ExpressionQueryType.resample:
@@ -24,10 +24,15 @@ export const getDefaults = (query: ExpressionQuery) => {
query.reducer = undefined;
break;
+ case ExpressionQueryType.math:
+ query.expression = undefined;
+ break;
+
case ExpressionQueryType.classic:
if (!query.conditions) {
query.conditions = [defaultCondition];
}
+
break;
default:
diff --git a/public/app/features/notifications/NotificationsPage.tsx b/public/app/features/notifications/NotificationsPage.tsx
index 22c64301384..ee3ace6faed 100644
--- a/public/app/features/notifications/NotificationsPage.tsx
+++ b/public/app/features/notifications/NotificationsPage.tsx
@@ -1,27 +1,12 @@
import React from 'react';
-import { connect, ConnectedProps } from 'react-redux';
import { Page } from 'app/core/components/Page/Page';
-import { GrafanaRouteComponentProps } from '../../core/navigation/types';
-import { getNavModel } from '../../core/selectors/navModel';
-import { StoreState } from '../../types';
-
import { StoredNotifications } from './StoredNotifications';
-const mapStateToProps = (state: StoreState) => ({
- navModel: getNavModel(state.navIndex, 'notifications'),
-});
-
-const connector = connect(mapStateToProps, undefined);
-
-interface OwnProps extends GrafanaRouteComponentProps {}
-
-type Props = OwnProps & ConnectedProps;
-
-export const NotificationsPage = ({ navModel }: Props) => {
+export const NotificationsPage = () => {
return (
-
+
@@ -29,4 +14,4 @@ export const NotificationsPage = ({ navModel }: Props) => {
);
};
-export default connect(mapStateToProps)(NotificationsPage);
+export default NotificationsPage;
diff --git a/public/app/features/plugins/sql/components/configuration/ConnectionLimits.tsx b/public/app/features/plugins/sql/components/configuration/ConnectionLimits.tsx
new file mode 100644
index 00000000000..5b9dad73c73
--- /dev/null
+++ b/public/app/features/plugins/sql/components/configuration/ConnectionLimits.tsx
@@ -0,0 +1,75 @@
+import React from 'react';
+
+import { FieldSet, InlineField } from '@grafana/ui';
+import { NumberInput } from 'app/core/components/OptionsUI/NumberInput';
+
+import { SQLConnectionLimits } from './types';
+
+interface Props<T> {
+ onPropertyChanged: (property: keyof T, value?: number) => void;
+ labelWidth: number;
+ jsonData: SQLConnectionLimits;
+}
+
+export const ConnectionLimits = <T extends SQLConnectionLimits>(props: Props<T>) => {
+ const { onPropertyChanged, labelWidth, jsonData } = props;
+
+ const onJSONDataNumberChanged = (property: keyof SQLConnectionLimits) => {
+ return (number?: number) => {
+ if (onPropertyChanged) {
+ onPropertyChanged(property, number);
+ }
+ };
+ };
+
+ return (
+
+
+ The maximum number of open connections to the database. If Max idle connections is greater than 0 and
+ Max open connections is less than Max idle connections, then
+ Max idle connections will be reduced to match the Max open connections limit. If set to 0,
+ there is no limit on the number of open connections.
+
+ }
+ labelWidth={labelWidth}
+ label="Max open"
+ >
+
+
+
+ The maximum number of connections in the idle connection pool. If Max open connections is greater than
+ 0 but less than Max idle connections, then Max idle connections will be reduced to
+ match the Max open connections limit. If set to 0, no idle connections are retained.
+
+ }
+ labelWidth={labelWidth}
+ label="Max idle"
+ >
+
+
+
+
+
+
+ );
+};
diff --git a/public/app/features/plugins/sql/components/configuration/TLSSecretsConfig.tsx b/public/app/features/plugins/sql/components/configuration/TLSSecretsConfig.tsx
new file mode 100644
index 00000000000..e3912bf0ff3
--- /dev/null
+++ b/public/app/features/plugins/sql/components/configuration/TLSSecretsConfig.tsx
@@ -0,0 +1,77 @@
+import React from 'react';
+
+import {
+ DataSourceJsonData,
+ DataSourcePluginOptionsEditorProps,
+ KeyValue,
+ onUpdateDatasourceSecureJsonDataOption,
+ updateDatasourcePluginResetOption,
+} from '@grafana/data';
+import { InlineField, SecretTextArea } from '@grafana/ui';
+
+export interface Props<T> {
+ editorProps: DataSourcePluginOptionsEditorProps<T>;
+ showCACert?: boolean;
+ secureJsonFields?: KeyValue;
+ labelWidth?: number;
+}
+
+export const TLSSecretsConfig = <T extends DataSourceJsonData>(props: Props<T>) => {
+ const { labelWidth, editorProps, showCACert } = props;
+ const { secureJsonFields } = editorProps.options;
+ return (
+ <>
+ To authenticate with a TLS/SSL client certificate, provide the client certificate here.}
+ labelWidth={labelWidth}
+ label="TLS/SSL Client Certificate"
+ >
+ {
+ updateDatasourcePluginResetOption(editorProps, 'tlsClientCert');
+ }}
+ >
+
+ {showCACert ? (
+ If the selected TLS/SSL mode requires a server root certificate, provide it here.}
+ labelWidth={labelWidth}
+ label="TLS/SSL Root Certificate"
+ >
+ {
+ updateDatasourcePluginResetOption(editorProps, 'tlsCACert');
+ }}
+ >
+
+ ) : null}
+
+ To authenticate with a client TLS/SSL certificate, provide the key here.}
+ labelWidth={labelWidth}
+ label="TLS/SSL Client Key"
+ >
+ {
+ updateDatasourcePluginResetOption(editorProps, 'tlsClientKey');
+ }}
+ >
+
+ >
+ );
+};
diff --git a/public/app/features/plugins/sql/components/configuration/types.ts b/public/app/features/plugins/sql/components/configuration/types.ts
new file mode 100644
index 00000000000..00678993762
--- /dev/null
+++ b/public/app/features/plugins/sql/components/configuration/types.ts
@@ -0,0 +1,5 @@
+export interface SQLConnectionLimits {
+ maxOpenConns: number;
+ maxIdleConns: number;
+ connMaxLifetime: number;
+}
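
A sketch of how a SQL datasource ConfigEditor might wire ConnectionLimits up (the surrounding editor and MySQLOptions type are hypothetical; only the standard DataSourcePluginOptionsEditorProps pattern is assumed):

    import React from 'react';
    import { DataSourcePluginOptionsEditorProps } from '@grafana/data';
    // MySQLOptions: hypothetical jsonData type extending SQLConnectionLimits.

    export const ConfigEditor = (props: DataSourcePluginOptionsEditorProps<MySQLOptions>) => {
      const { options, onOptionsChange } = props;
      return (
        <ConnectionLimits
          labelWidth={40}
          jsonData={options.jsonData}
          onPropertyChanged={(property, value) =>
            onOptionsChange({
              ...options,
              jsonData: { ...options.jsonData, [property]: value },
            })
          }
        />
      );
    };
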
diff --git a/public/app/features/profile/ChangePasswordPage.test.tsx b/public/app/features/profile/ChangePasswordPage.test.tsx
index 2c6e15e39d9..4506dfdc361 100644
--- a/public/app/features/profile/ChangePasswordPage.test.tsx
+++ b/public/app/features/profile/ChangePasswordPage.test.tsx
@@ -4,7 +4,6 @@ import React from 'react';
import config from 'app/core/config';
-import { getNavModel } from '../../core/selectors/navModel';
import { backendSrv } from '../../core/services/backend_srv';
import { Props, ChangePasswordPage } from './ChangePasswordPage';
@@ -22,23 +21,6 @@ const defaultProps: Props = {
orgId: 0,
authLabels: ['github'],
},
- navModel: getNavModel(
- {
- 'profile-settings': {
- icon: 'sliders-v-alt',
- id: 'profile-settings',
- parentItem: {
- id: 'profile',
- text: 'Test User',
- img: '/avatar/46d229b033af06a191ff2267bca9ae56',
- url: '/profile',
- },
- text: 'Preferences',
- url: '/profile',
- },
- },
- 'profile-settings'
- ),
loadUser: jest.fn(),
changePassword: jest.fn(),
};
diff --git a/public/app/features/profile/ChangePasswordPage.tsx b/public/app/features/profile/ChangePasswordPage.tsx
index b375c512e9d..f6900e56e82 100644
--- a/public/app/features/profile/ChangePasswordPage.tsx
+++ b/public/app/features/profile/ChangePasswordPage.tsx
@@ -2,23 +2,18 @@ import React from 'react';
import { connect, ConnectedProps } from 'react-redux';
import { useMount } from 'react-use';
-import { NavModel } from '@grafana/data';
import { Page } from 'app/core/components/Page/Page';
-import { getNavModel } from 'app/core/selectors/navModel';
import { StoreState } from 'app/types';
import { ChangePasswordForm } from './ChangePasswordForm';
import { changePassword, loadUser } from './state/actions';
-export interface OwnProps {
- navModel: NavModel;
-}
+export interface OwnProps {}
function mapStateToProps(state: StoreState) {
const userState = state.user;
const { isUpdating, user } = userState;
return {
- navModel: getNavModel(state.navIndex, `change-password`),
isUpdating,
user,
};
@@ -33,11 +28,11 @@ const connector = connect(mapStateToProps, mapDispatchToProps);
export type Props = OwnProps & ConnectedProps;
-export function ChangePasswordPage({ navModel, loadUser, isUpdating, user, changePassword }: Props) {
+export function ChangePasswordPage({ loadUser, isUpdating, user, changePassword }: Props) {
useMount(() => loadUser());
return (
-
+
{user ? (
<>
diff --git a/public/app/features/profile/UserProfileEditPage.test.tsx b/public/app/features/profile/UserProfileEditPage.test.tsx
index 905c9d4f77c..d2f1ed7613a 100644
--- a/public/app/features/profile/UserProfileEditPage.test.tsx
+++ b/public/app/features/profile/UserProfileEditPage.test.tsx
@@ -7,7 +7,6 @@ import { OrgRole } from '@grafana/data';
import { selectors } from '@grafana/e2e-selectors';
import TestProvider from '../../../test/helpers/TestProvider';
-import { getNavModel } from '../../core/selectors/navModel';
import { backendSrv } from '../../core/services/backend_srv';
import { TeamPermissionLevel } from '../../types';
@@ -66,23 +65,6 @@ const defaultProps: Props = {
seenAt: new Date().toUTCString(),
},
],
- navModel: getNavModel(
- {
- 'profile-settings': {
- icon: 'sliders-v-alt',
- id: 'profile-settings',
- parentItem: {
- id: 'profile',
- text: 'Test User',
- img: '/avatar/46d229b033af06a191ff2267bca9ae56',
- url: '/profile',
- },
- text: 'Preferences',
- url: '/profile',
- },
- },
- 'profile-settings'
- ),
initUserProfilePage: jest.fn().mockResolvedValue(undefined),
revokeUserSession: jest.fn().mockResolvedValue(undefined),
changeUserOrg: jest.fn().mockResolvedValue(undefined),
diff --git a/public/app/features/profile/UserProfileEditPage.tsx b/public/app/features/profile/UserProfileEditPage.tsx
index 446829834ad..967dc8afa2c 100644
--- a/public/app/features/profile/UserProfileEditPage.tsx
+++ b/public/app/features/profile/UserProfileEditPage.tsx
@@ -2,11 +2,9 @@ import React from 'react';
import { connect, ConnectedProps } from 'react-redux';
import { useMount } from 'react-use';
-import { NavModel } from '@grafana/data';
import { VerticalGroup } from '@grafana/ui';
import { Page } from 'app/core/components/Page/Page';
import SharedPreferences from 'app/core/components/SharedPreferences/SharedPreferences';
-import { getNavModel } from 'app/core/selectors/navModel';
import { StoreState } from 'app/types';
import UserOrganizations from './UserOrganizations';
@@ -15,15 +13,12 @@ import UserSessions from './UserSessions';
import { UserTeams } from './UserTeams';
import { changeUserOrg, initUserProfilePage, revokeUserSession, updateUserProfile } from './state/actions';
-export interface OwnProps {
- navModel: NavModel;
-}
+export interface OwnProps {}
function mapStateToProps(state: StoreState) {
const userState = state.user;
const { user, teams, orgs, sessions, teamsAreLoading, orgsAreLoading, sessionsAreLoading, isUpdating } = userState;
return {
- navModel: getNavModel(state.navIndex, 'profile-settings'),
orgsAreLoading,
sessionsAreLoading,
teamsAreLoading,
@@ -47,7 +42,6 @@ const connector = connect(mapStateToProps, mapDispatchToProps);
export type Props = OwnProps & ConnectedProps;
export function UserProfileEditPage({
- navModel,
orgsAreLoading,
sessionsAreLoading,
teamsAreLoading,
@@ -64,7 +58,7 @@ export function UserProfileEditPage({
useMount(() => initUserProfilePage());
return (
-
+
diff --git a/public/app/features/profile/routes.tsx b/public/app/features/profile/routes.tsx
index c7939989eb7..a473b05050c 100644
--- a/public/app/features/profile/routes.tsx
+++ b/public/app/features/profile/routes.tsx
@@ -23,6 +23,12 @@ const profileRoutes: RouteDescriptor[] = [
() => import(/* webpackChunkName: "SelectOrgPage" */ 'app/features/org/SelectOrgPage')
),
},
+ {
+ path: '/profile/notifications',
+ component: SafeDynamicImport(
+ () => import(/* webpackChunkName: "NotificationsPage"*/ 'app/features/notifications/NotificationsPage')
+ ),
+ },
];
export function getProfileRoutes(cfg = config): RouteDescriptor[] {
diff --git a/public/app/features/storage/ExportView.tsx b/public/app/features/storage/ExportView.tsx
index 77bec1490c5..beefbc99f6f 100644
--- a/public/app/features/storage/ExportView.tsx
+++ b/public/app/features/storage/ExportView.tsx
@@ -1,9 +1,22 @@
-import React, { useEffect, useState } from 'react';
-import { useLocalStorage } from 'react-use';
+import React, { useEffect, useState, useCallback } from 'react';
+import { useAsync, useLocalStorage } from 'react-use';
-import { isLiveChannelMessageEvent, isLiveChannelStatusEvent, LiveChannelScope } from '@grafana/data';
+import { isLiveChannelMessageEvent, isLiveChannelStatusEvent, LiveChannelScope, SelectableValue } from '@grafana/data';
import { getBackendSrv, getGrafanaLiveSrv } from '@grafana/runtime';
-import { Button, CodeEditor, HorizontalGroup, LinkButton } from '@grafana/ui';
+import {
+ Button,
+ CodeEditor,
+ Collapse,
+ Field,
+ HorizontalGroup,
+ InlineField,
+ InlineFieldRow,
+ InlineSwitch,
+ Input,
+ LinkButton,
+ Select,
+ Switch,
+} from '@grafana/ui';
import { StorageView } from './types';
@@ -21,59 +34,93 @@ interface ExportStatusMessage {
status: string;
}
-interface ExportInclude {
- auth: boolean;
- ds: boolean;
- dash: boolean;
- services: boolean;
- usage: boolean;
- anno: boolean;
- snapshots: boolean;
-}
-
interface ExportJob {
- format: 'git';
+ format: string; // currently only 'git'
generalFolderPath: string;
history: boolean;
- include: ExportInclude;
+ exclude: Record;
git?: {};
}
-const includAll: ExportInclude = {
- auth: true,
- ds: true,
- dash: true,
- services: true,
- usage: true,
- anno: true,
- snapshots: false, // will fail until we have a real user
-};
-
const defaultJob: ExportJob = {
format: 'git',
generalFolderPath: 'general',
history: true,
- include: includAll,
+ exclude: {},
git: {},
};
+interface ExporterInfo {
+ key: string;
+ name: string;
+ description: string;
+ children?: ExporterInfo[];
+}
+
+const formats: Array<SelectableValue<string>> = [
+ { label: 'GIT', value: 'git', description: 'Exports a fresh git repository' },
+];
+
interface Props {
onPathChange: (p: string, v?: StorageView) => void;
}
+const labelWith = 18;
+
export const ExportView = ({ onPathChange }: Props) => {
+ const [status, setStatus] = useState<ExportStatusMessage>();
- const [rawBody, setBody] = useLocalStorage(EXPORT_LOCAL_STORAGE_KEY, defaultJob);
- const body = { ...defaultJob, ...rawBody, include: { ...includAll, ...rawBody?.include } };
+ const [body, setBody] = useLocalStorage(EXPORT_LOCAL_STORAGE_KEY, defaultJob);
+ const [details, setDetails] = useState(false);
+
+ const serverOptions = useAsync(() => {
+ return getBackendSrv().get<{ exporters: ExporterInfo[] }>('/api/admin/export/options');
+ }, []);
const doStart = () => {
- getBackendSrv().post('/api/admin/export', body);
+ getBackendSrv()
+ .post('/api/admin/export', body)
+ .then((v) => {
+ if (v.cfg && v.status.running) {
+ setBody(v.cfg); // saves the valid parsed body
+ }
+ });
};
+
const doStop = () => {
getBackendSrv().post('/api/admin/export/stop');
};
+ const setInclude = useCallback(
+ (k: string, v: boolean) => {
+ if (!serverOptions.value || !body) {
+ return;
+ }
+ const exclude: Record = {};
+ if (k === '*') {
+ if (!v) {
+ for (let exp of serverOptions.value.exporters) {
+ exclude[exp.key] = true;
+ }
+ }
+ setBody({ ...body, exclude });
+ return;
+ }
+
+ for (let exp of serverOptions.value.exporters) {
+ let val = body.exclude?.[exp.key];
+ if (k === exp.key) {
+ val = !v;
+ }
+ if (val) {
+ exclude[exp.key] = val;
+ }
+ }
+ setBody({ ...body, exclude });
+ },
+ [body, setBody, serverOptions]
+ );
+
useEffect(() => {
const subscription = getGrafanaLiveSrv()
.getStream({
@@ -116,18 +163,53 @@ export const ExportView = ({ onPathChange }: Props) => {
{!Boolean(status?.running) && (
)}
+
+
+
+
+ {
+ setBody(JSON.parse(text)); // force JSON?
+ }}
+ />
+
);
};
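
Note the inversion in setInclude: the checkboxes are phrased as include toggles while the job body persists an exclude map, so unchecking an exporter writes exclude[key] = true, and the '*' key toggles everything at once. The single-key case in isolation (a sketch):

    function toggleInclude(
      exclude: Record<string, boolean>,
      key: string,
      include: boolean
    ): Record<string, boolean> {
      const next = { ...exclude };
      if (include) {
        delete next[key]; // included exporters simply don't appear in the map
      } else {
        next[key] = true;
      }
      return next;
    }
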
diff --git a/public/app/features/storage/FolderView.tsx b/public/app/features/storage/FolderView.tsx
index 9a10b8beaf1..02d5d7a6545 100644
--- a/public/app/features/storage/FolderView.tsx
+++ b/public/app/features/storage/FolderView.tsx
@@ -13,9 +13,10 @@ interface Props {
path: string;
onPathChange: (p: string, view?: StorageView) => void;
view: StorageView;
+ fileNames: string[];
}
-export function FolderView({ listing, path, onPathChange, view }: Props) {
+export function FolderView({ listing, path, onPathChange, view, fileNames }: Props) {
const styles = useStyles2(getStyles);
switch (view) {
@@ -35,6 +36,7 @@ export function FolderView({ listing, path, onPathChange, view }: Props) {
onPathChange(path); // back to data
}
}}
+ fileNames={fileNames}
/>
);
}
diff --git a/public/app/features/storage/StoragePage.tsx b/public/app/features/storage/StoragePage.tsx
index 64639923997..81f94cf045d 100644
--- a/public/app/features/storage/StoragePage.tsx
+++ b/public/app/features/storage/StoragePage.tsx
@@ -18,7 +18,7 @@ import { ExportView } from './ExportView';
import { FileView } from './FileView';
import { FolderView } from './FolderView';
import { RootView } from './RootView';
-import { getGrafanaStorage } from './helper';
+import { getGrafanaStorage, filenameAlreadyExists } from './helper';
import { StorageView } from './types';
interface RouteParams {
@@ -211,7 +211,7 @@ export default function StoragePage(props: Props) {
))}
{isFolder ? (
-
+
) : (
)}
@@ -231,10 +231,8 @@ export default function StoragePage(props: Props) {
}}
validate={(folderName) => {
const lowerCase = folderName.toLowerCase();
- const trimmedLowerCase = lowerCase.trim();
- const existingTrimmedLowerCaseNames = fileNames.map((f) => f.trim().toLowerCase());
- if (existingTrimmedLowerCaseNames.includes(trimmedLowerCase)) {
+ if (filenameAlreadyExists(folderName, fileNames)) {
return 'A file or a folder with the same name already exists';
}
diff --git a/public/app/features/storage/UploadView.tsx b/public/app/features/storage/UploadView.tsx
index 370f5370666..c201268f445 100644
--- a/public/app/features/storage/UploadView.tsx
+++ b/public/app/features/storage/UploadView.tsx
@@ -3,14 +3,15 @@ import React, { useState } from 'react';
import SVG from 'react-inlinesvg';
import { GrafanaTheme2 } from '@grafana/data';
-import { Button, ButtonGroup, Field, FileDropzone, useStyles2 } from '@grafana/ui';
+import { Alert, Button, ButtonGroup, Checkbox, Field, FileDropzone, useStyles2 } from '@grafana/ui';
-import { getGrafanaStorage } from './helper';
+import { filenameAlreadyExists, getGrafanaStorage } from './helper';
import { UploadReponse } from './types';
interface Props {
folder: string;
onUpload: (rsp: UploadReponse) => void;
+ fileNames: string[];
}
interface ErrorResponse {
@@ -27,12 +28,13 @@ const FileDropzoneCustomChildren = ({ secondaryText = 'Drag and drop here or bro
);
};
-export const UploadView = ({ folder, onUpload }: Props) => {
+export const UploadView = ({ folder, onUpload, fileNames }: Props) => {
const [file, setFile] = useState<File | undefined>(undefined);
const styles = useStyles2(getStyles);
const [error, setError] = useState({ message: '' });
+ const [overwriteExistingFile, setOverwriteExistingFile] = useState(false);
const Preview = () => {
if (!file) {
@@ -58,7 +60,7 @@ export const UploadView = ({ folder, onUpload }: Props) => {
return;
}
- const rsp = await getGrafanaStorage().upload(folder, file);
+ const rsp = await getGrafanaStorage().upload(folder, file, overwriteExistingFile);
if (rsp.status !== 200) {
setError(rsp);
} else {
@@ -66,6 +68,9 @@ export const UploadView = ({ folder, onUpload }: Props) => {
}
};
+ const filenameExists = file ? filenameAlreadyExists(file.name, fileNames) : false;
+ const isUploadDisabled = !file || (filenameExists && !overwriteExistingFile);
+
return (
{
{error.message !== '' ? {error.message}
: Boolean(file) ? : }
+ {file && filenameExists && (
+
+
+ setOverwriteExistingFile(!overwriteExistingFile)}
+ label="Overwrite existing file"
+ />
+
+
+ )}
+
-
+
Upload
@@ -133,4 +150,7 @@ const getStyles = (theme: GrafanaTheme2) => ({
color: ${theme.colors.text.secondary};
margin-bottom: ${theme.spacing(2)};
`,
+ alert: css`
+ padding-top: 10px;
+ `,
});
diff --git a/public/app/features/storage/helper.ts b/public/app/features/storage/helper.ts
index 366fe02867a..51f01daf3e9 100644
--- a/public/app/features/storage/helper.ts
+++ b/public/app/features/storage/helper.ts
@@ -7,7 +7,7 @@ import { UploadReponse } from './types';
export interface GrafanaStorage {
get:
(path: string) => Promise;
list: (path: string) => Promise;
- upload: (folder: string, file: File) => Promise;
+ upload: (folder: string, file: File, overwriteExistingFile: boolean) => Promise;
createFolder: (path: string) => Promise<{ error?: string }>;
delete: (path: { isFolder: boolean; path: string }) => Promise<{ error?: string }>;
}
@@ -82,10 +82,11 @@ class SimpleStorage implements GrafanaStorage {
return req.isFolder ? this.deleteFolder({ path: req.path, force: true }) : this.deleteFile({ path: req.path });
}
- async upload(folder: string, file: File): Promise {
+ async upload(folder: string, file: File, overwriteExistingFile: boolean): Promise {
const formData = new FormData();
formData.append('folder', folder);
formData.append('file', file);
+ formData.append('overwriteExistingFile', String(overwriteExistingFile));
const res = await fetch('/api/storage/upload', {
method: 'POST',
body: formData,
@@ -112,3 +113,11 @@ export function getGrafanaStorage() {
}
return storage;
}
+
+export function filenameAlreadyExists(folderName: string, fileNames: string[]) {
+ const lowerCase = folderName.toLowerCase();
+ const trimmedLowerCase = lowerCase.trim();
+ const existingTrimmedLowerCaseNames = fileNames.map((f) => f.trim().toLowerCase());
+
+ return existingTrimmedLowerCaseNames.includes(trimmedLowerCase);
+}
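
filenameAlreadyExists compares names case-insensitively and ignores surrounding whitespace on both sides, so:

    filenameAlreadyExists('  Dashboard.JSON ', ['dashboard.json']); // true
    filenameAlreadyExists('dashboard-2.json', ['dashboard.json']); // false
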
diff --git a/public/app/plugins/datasource/grafana-azure-monitor-datasource/__mocks__/datasource.ts b/public/app/plugins/datasource/grafana-azure-monitor-datasource/__mocks__/datasource.ts
index b8a37e18fc2..602c11756f7 100644
--- a/public/app/plugins/datasource/grafana-azure-monitor-datasource/__mocks__/datasource.ts
+++ b/public/app/plugins/datasource/grafana-azure-monitor-datasource/__mocks__/datasource.ts
@@ -28,6 +28,7 @@ export default function createMockDatasource(overrides?: DeepPartial
getAzureLogAnalyticsWorkspaces: jest.fn().mockResolvedValueOnce([]),
+ getSubscriptions: jest.fn().mockResolvedValue([]),
getResourceGroups: jest.fn().mockResolvedValueOnce([]),
getMetricDefinitions: jest.fn().mockResolvedValueOnce([]),
getResourceNames: jest.fn().mockResolvedValueOnce([]),
@@ -43,6 +44,7 @@ export default function createMockDatasource(overrides?: DeepPartial
getResourceURIFromWorkspace: jest.fn().mockReturnValue(''),
getResourceURIDisplayProperties: jest.fn().mockResolvedValue({}),
},
+ getVariablesRaw: jest.fn().mockReturnValue([]),
...overrides,
};
diff --git a/public/app/plugins/datasource/grafana-azure-monitor-datasource/azureMetadata/logsResourceTypes.ts b/public/app/plugins/datasource/grafana-azure-monitor-datasource/azureMetadata/logsResourceTypes.ts
index 99c67c8a7cd..cfff000ceb1 100644
--- a/public/app/plugins/datasource/grafana-azure-monitor-datasource/azureMetadata/logsResourceTypes.ts
+++ b/public/app/plugins/datasource/grafana-azure-monitor-datasource/azureMetadata/logsResourceTypes.ts
@@ -110,6 +110,7 @@ export const logsResourceTypes = [
'microsoft.keyvault/vaults',
'microsoft.kubernetes/connectedclusters',
'microsoft.kusto/clusters',
+ 'microsoft.loadtestservice/loadtests',
'microsoft.logic/integrationaccounts',
'microsoft.logic/integrationserviceenvironments',
'microsoft.logic/workflows',
@@ -161,6 +162,7 @@ export const logsResourceTypes = [
'microsoft.resources/subscriptions',
'microsoft.resources/subscriptions/resourcegroups',
'microsoft.search/searchservices',
+ 'microsoft.security/antimalwaresettings',
'microsoft.securityinsights/settings',
'microsoft.servicebus/namespaces',
'microsoft.signalrservice/signalr',
diff --git a/public/app/plugins/datasource/grafana-azure-monitor-datasource/azureMetadata/metricNamespaces.ts b/public/app/plugins/datasource/grafana-azure-monitor-datasource/azureMetadata/metricNamespaces.ts
index 49f4c7642e0..40590906ad9 100644
--- a/public/app/plugins/datasource/grafana-azure-monitor-datasource/azureMetadata/metricNamespaces.ts
+++ b/public/app/plugins/datasource/grafana-azure-monitor-datasource/azureMetadata/metricNamespaces.ts
@@ -12,9 +12,11 @@ To programmatically get the list, execute in the browser console:
Note: Validate that the output makes sense; the format of the page may change.
*/
export const supportedMetricNamespaces = [
+ 'Microsoft.AAD/DomainServices',
'microsoft.aadiam/azureADMetrics',
'Microsoft.AnalysisServices/servers',
'Microsoft.ApiManagement/service',
+ 'Microsoft.App/containerapps',
'Microsoft.AppConfiguration/configurationStores',
'Microsoft.AppPlatform/Spring',
'Microsoft.Automation/automationAccounts',
@@ -58,6 +60,7 @@ export const supportedMetricNamespaces = [
'Microsoft.DataFactory/factories',
'Microsoft.DataLakeAnalytics/accounts',
'Microsoft.DataLakeStore/accounts',
+ 'Microsoft.DataProtection/BackupVaults',
'Microsoft.DataShare/accounts',
'Microsoft.DBforMariaDB/servers',
'Microsoft.DBforMySQL/flexibleServers',
@@ -73,6 +76,7 @@ export const supportedMetricNamespaces = [
'Microsoft.DigitalTwins/digitalTwinsInstances',
'Microsoft.DocumentDB/cassandraClusters',
'Microsoft.DocumentDB/DatabaseAccounts',
+ 'microsoft.edgezones/edgezones',
'Microsoft.EventGrid/domains',
'Microsoft.EventGrid/eventSubscriptions',
'Microsoft.EventGrid/extensionTopics',
@@ -111,6 +115,8 @@ export const supportedMetricNamespaces = [
'Microsoft.Network/azureFirewalls',
'microsoft.network/bastionHosts',
'Microsoft.Network/connections',
+ 'Microsoft.Network/dnsForwardingRulesets',
+ 'Microsoft.Network/dnsResolvers',
'Microsoft.Network/dnszones',
'Microsoft.Network/expressRouteCircuits',
'Microsoft.Network/expressRouteCircuits/peerings',
@@ -142,6 +148,7 @@ export const supportedMetricNamespaces = [
'Microsoft.Relay/namespaces',
'microsoft.resources/subscriptions',
'Microsoft.Search/searchServices',
+ 'microsoft.securitydetonation/chambers',
'Microsoft.ServiceBus/Namespaces',
'Microsoft.SignalRService/SignalR',
'Microsoft.SignalRService/WebPubSub',
@@ -164,6 +171,7 @@ export const supportedMetricNamespaces = [
'Microsoft.TimeSeriesInsights/environments/eventsources',
'Microsoft.VMwareCloudSimple/virtualMachines',
'Microsoft.Web/connections',
+ 'Microsoft.Web/containerapps',
'Microsoft.Web/hostingEnvironments',
'Microsoft.Web/hostingEnvironments/multiRolePools',
'Microsoft.Web/hostingEnvironments/workerPools',
diff --git a/public/app/plugins/datasource/grafana-azure-monitor-datasource/components/QueryEditor/QueryEditor.tsx b/public/app/plugins/datasource/grafana-azure-monitor-datasource/components/QueryEditor/QueryEditor.tsx
index a85e2c7320b..65db8e09727 100644
--- a/public/app/plugins/datasource/grafana-azure-monitor-datasource/components/QueryEditor/QueryEditor.tsx
+++ b/public/app/plugins/datasource/grafana-azure-monitor-datasource/components/QueryEditor/QueryEditor.tsx
@@ -47,7 +47,7 @@ const QueryEditor: React.FC = ({
[onChange, onRunQuery]
);
- const query = usePreparedQuery(baseQuery, onQueryChange);
+ const query = usePreparedQuery(baseQuery, onQueryChange, setError);
const subscriptionId = query.subscription || datasource.azureMonitorDatasource.defaultSubscriptionId;
const variableOptionGroup = {
diff --git a/public/app/plugins/datasource/grafana-azure-monitor-datasource/components/QueryEditor/usePreparedQuery.ts b/public/app/plugins/datasource/grafana-azure-monitor-datasource/components/QueryEditor/usePreparedQuery.ts
index 9ed96bb1103..a400102897c 100644
--- a/public/app/plugins/datasource/grafana-azure-monitor-datasource/components/QueryEditor/usePreparedQuery.ts
+++ b/public/app/plugins/datasource/grafana-azure-monitor-datasource/components/QueryEditor/usePreparedQuery.ts
@@ -4,17 +4,20 @@ import { useEffect, useMemo } from 'react';
import { getTemplateSrv } from '@grafana/runtime';
-import { AzureMonitorQuery, AzureQueryType } from '../../types';
+import { AzureMonitorErrorish, AzureMonitorQuery, AzureQueryType } from '../../types';
import migrateQuery from '../../utils/migrateQuery';
const DEFAULT_QUERY = {
queryType: AzureQueryType.AzureMonitor,
};
-const prepareQuery = (query: AzureMonitorQuery) => {
+const prepareQuery = (
+ query: AzureMonitorQuery,
+ setError: (errorSource: string, error: AzureMonitorErrorish) => void
+) => {
// Note: _.defaults does not apply default values deeply.
const withDefaults = defaults({}, query, DEFAULT_QUERY);
- const migratedQuery = migrateQuery(withDefaults, getTemplateSrv());
+ const migratedQuery = migrateQuery(withDefaults, getTemplateSrv(), setError);
// If we didn't make any changes to the object, then return the original object to keep the
// identity the same, and not trigger any other useEffects or anything.
@@ -24,8 +27,12 @@ const prepareQuery = (query: AzureMonitorQuery) => {
/**
* Returns queries with some defaults + migrations, and calls onChange function to notify if it changes
*/
-const usePreparedQuery = (query: AzureMonitorQuery, onChangeQuery: (newQuery: AzureMonitorQuery) => void) => {
- const preparedQuery = useMemo(() => prepareQuery(query), [query]);
+const usePreparedQuery = (
+ query: AzureMonitorQuery,
+ onChangeQuery: (newQuery: AzureMonitorQuery) => void,
+ setError: (errorSource: string, error: AzureMonitorErrorish) => void
+) => {
+ const preparedQuery = useMemo(() => prepareQuery(query, setError), [query, setError]);
useEffect(() => {
if (preparedQuery !== query) {
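
The identity check here is the usual referential-equality optimization: when migration changes nothing, prepareQuery returns the original object, so hooks that compare by reference don't re-fire. The shape of the pattern (a sketch; a crude JSON deep-compare stands in for whatever equality check the real migration uses):

    function migrateKeepingIdentity<T extends object>(q: T, migrate: (q: T) => T): T {
      const next = migrate(q);
      // Hand back the original reference when nothing changed.
      return JSON.stringify(next) === JSON.stringify(q) ? q : next;
    }
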
diff --git a/public/app/plugins/datasource/grafana-azure-monitor-datasource/components/VariableEditor/GrafanaTemplateVariableFn.tsx b/public/app/plugins/datasource/grafana-azure-monitor-datasource/components/VariableEditor/GrafanaTemplateVariableFn.tsx
new file mode 100644
index 00000000000..35fd3707468
--- /dev/null
+++ b/public/app/plugins/datasource/grafana-azure-monitor-datasource/components/VariableEditor/GrafanaTemplateVariableFn.tsx
@@ -0,0 +1,59 @@
+import React, { ChangeEvent, useCallback, useEffect, useState } from 'react';
+
+import { InlineField, Input } from '@grafana/ui';
+
+import DataSource from '../../datasource';
+import { migrateStringQueriesToObjectQueries } from '../../grafanaTemplateVariableFns';
+import { AzureMonitorQuery, AzureQueryType } from '../../types';
+
+const GrafanaTemplateVariableFnInput = ({
+ query,
+ updateQuery,
+ datasource,
+}: {
+ query: AzureMonitorQuery;
+ updateQuery: (val: AzureMonitorQuery) => void;
+ datasource: DataSource;
+}) => {
+ const [inputVal, setInputVal] = useState('');
+
+ useEffect(() => {
+ setInputVal(query.grafanaTemplateVariableFn?.rawQuery || '');
+ }, [query.grafanaTemplateVariableFn?.rawQuery]);
+
+ const onRunQuery = useCallback(
+ (newQuery: string) => {
+ migrateStringQueriesToObjectQueries(newQuery, { datasource }).then((updatedQuery) => {
+ if (updatedQuery.queryType === AzureQueryType.GrafanaTemplateVariableFn) {
+ updateQuery(updatedQuery);
+ } else {
+ updateQuery({
+ ...query,
+ grafanaTemplateVariableFn: {
+ kind: 'UnknownQuery',
+ rawQuery: newQuery,
+ },
+ });
+ }
+ });
+ },
+ [datasource, query, updateQuery]
+ );
+
+ const onChange = (event: ChangeEvent) => {
+ setInputVal(event.target.value);
+ };
+
+ return (
+
+ onRunQuery(inputVal)}
+ />
+
+ );
+};
+
+export default GrafanaTemplateVariableFnInput;
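
The input uses the common commit-on-blur pattern: local state absorbs every keystroke, and the async migration runs only once editing finishes. The wiring in outline (a sketch of the fragment inside the component; prop names assumed from the handlers defined above):

    const [inputVal, setInputVal] = useState('');
    // ...
    <Input
      value={inputVal}
      onChange={(e) => setInputVal(e.currentTarget.value)}
      onBlur={() => onRunQuery(inputVal)} // commit once editing finishes
    />
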
diff --git a/public/app/plugins/datasource/grafana-azure-monitor-datasource/components/VariableEditor/VariableEditor.test.tsx b/public/app/plugins/datasource/grafana-azure-monitor-datasource/components/VariableEditor/VariableEditor.test.tsx
index f3e7ada507d..63a649aae0b 100644
--- a/public/app/plugins/datasource/grafana-azure-monitor-datasource/components/VariableEditor/VariableEditor.test.tsx
+++ b/public/app/plugins/datasource/grafana-azure-monitor-datasource/components/VariableEditor/VariableEditor.test.tsx
@@ -1,8 +1,9 @@
import { render, screen, waitFor } from '@testing-library/react';
import userEvent from '@testing-library/user-event';
import React from 'react';
-import { select } from 'react-select-event';
+import { select, openMenu } from 'react-select-event';
+import * as grafanaRuntime from '@grafana/runtime';
import * as ui from '@grafana/ui';
import createMockDatasource from '../../__mocks__/datasource';
@@ -18,81 +19,60 @@ jest.mock('@grafana/ui', () => ({
},
}));
+const defaultProps = {
+ query: {
+ refId: 'A',
+ queryType: AzureQueryType.LogAnalytics,
+ azureLogAnalytics: {
+ query: 'test query',
+ },
+ subscription: 'id',
+ },
+ onChange: jest.fn(),
+ datasource: createMockDatasource(),
+};
+
+const originalConfigValue = grafanaRuntime.config.featureToggles.azTemplateVars;
+beforeEach(() => {
+ // reset config
+ grafanaRuntime.config.featureToggles.azTemplateVars = originalConfigValue;
+});
+
describe('VariableEditor:', () => {
it('can select a query type', async () => {
const onChange = jest.fn();
-
- const props = {
- query: {
- refId: 'A',
- queryType: AzureQueryType.LogAnalytics,
- azureLogAnalytics: {
- query: 'test query',
- },
- subscription: 'id',
- },
- onChange,
- datasource: createMockDatasource(),
- };
- render( );
+ const { rerender } = render( );
await waitFor(() => screen.getByLabelText('select query type'));
expect(screen.getByLabelText('select query type')).toBeInTheDocument();
screen.getByLabelText('select query type').click();
await select(screen.getByLabelText('select query type'), 'Grafana Query Function', {
container: document.body,
});
+ expect(onChange).toHaveBeenCalledWith(
+ expect.objectContaining({
+ queryType: AzureQueryType.GrafanaTemplateVariableFn,
+ })
+ );
+ const newQuery = onChange.mock.calls.at(-1)[0];
+ rerender( );
expect(screen.queryByText('Logs')).not.toBeInTheDocument();
expect(screen.queryByText('Grafana Query Function')).toBeInTheDocument();
});
describe('log queries:', () => {
it('should render', async () => {
- const props = {
- query: {
- refId: 'A',
- queryType: AzureQueryType.LogAnalytics,
- azureLogAnalytics: {
- query: 'test query',
- },
- subscription: 'id',
- },
- onChange: () => {},
- datasource: createMockDatasource(),
- };
- render( );
+ render( );
await waitFor(() => screen.queryByTestId('mockeditor'));
expect(screen.queryByText('Resource')).toBeInTheDocument();
expect(screen.queryByTestId('mockeditor')).toBeInTheDocument();
});
- it('should render with legacy query strings', async () => {
- const props = {
- query: 'test query',
- onChange: () => {},
- datasource: createMockDatasource(),
- };
- render( );
- await waitFor(() => screen.queryByTestId('mockeditor'));
- expect(screen.queryByText('Resource')).toBeInTheDocument();
- expect(screen.queryByTestId('mockeditor')).toBeInTheDocument();
- });
it('should call on change if the query changes', async () => {
- const props = {
- query: {
- refId: 'A',
- queryType: AzureQueryType.LogAnalytics,
- azureLogAnalytics: {
- query: 'test query',
- },
- subscription: 'id',
- },
- onChange: jest.fn(),
- datasource: createMockDatasource(),
- };
- render( );
+ const onChange = jest.fn();
+ render( );
await waitFor(() => screen.queryByTestId('mockeditor'));
expect(screen.queryByTestId('mockeditor')).toBeInTheDocument();
await userEvent.type(screen.getByTestId('mockeditor'), '{backspace}');
- expect(props.onChange).toHaveBeenCalledWith({
+ expect(onChange).toHaveBeenCalledWith({
azureLogAnalytics: {
query: 'test quer',
},
@@ -106,6 +86,7 @@ describe('VariableEditor:', () => {
describe('grafana template variable fn queries:', () => {
it('should render', async () => {
const props = {
+ ...defaultProps,
query: {
refId: 'A',
queryType: AzureQueryType.GrafanaTemplateVariableFn,
@@ -115,8 +96,6 @@ describe('VariableEditor:', () => {
},
subscription: 'id',
} as AzureMonitorQuery,
- onChange: () => {},
- datasource: createMockDatasource(),
};
render( );
await waitFor(() => screen.queryByText('Grafana template variable function'));
@@ -126,6 +105,7 @@ describe('VariableEditor:', () => {
it('should call on change if the query changes', async () => {
const props = {
+ ...defaultProps,
query: {
refId: 'A',
queryType: AzureQueryType.GrafanaTemplateVariableFn,
@@ -135,8 +115,6 @@ describe('VariableEditor:', () => {
},
subscription: 'subscriptionId',
} as AzureMonitorQuery,
- onChange: jest.fn(),
- datasource: createMockDatasource(),
};
render( );
await waitFor(() => screen.queryByText('Grafana template variable function'));
@@ -155,4 +133,95 @@ describe('VariableEditor:', () => {
});
});
});
+
+ describe('predefined queries:', () => {
+ it('should show the new query types if feature gate is enabled', async () => {
+ grafanaRuntime.config.featureToggles.azTemplateVars = true;
+ render( );
+ openMenu(screen.getByLabelText('select query type'));
+ await waitFor(() => expect(screen.getByText('Subscriptions')).toBeInTheDocument());
+ });
+
+ it('should not show the new query types if feature gate is disabled', async () => {
+ grafanaRuntime.config.featureToggles.azTemplateVars = false;
+ render( );
+ openMenu(screen.getByLabelText('select query type'));
+ await waitFor(() => expect(screen.queryByText('Subscriptions')).not.toBeInTheDocument());
+ });
+
+ it('should run the query if requesting subscriptions', async () => {
+ grafanaRuntime.config.featureToggles.azTemplateVars = true;
+ const onChange = jest.fn();
+ const { rerender } = render( );
+ openMenu(screen.getByLabelText('select query type'));
+ screen.getByText('Subscriptions').click();
+ // Simulate onChange behavior
+ const newQuery = onChange.mock.calls.at(-1)[0];
+ rerender( );
+ await waitFor(() => expect(screen.getByText('Subscriptions')).toBeInTheDocument());
+ expect(onChange).toHaveBeenCalledWith(
+ expect.objectContaining({ queryType: AzureQueryType.SubscriptionsQuery, refId: 'A' })
+ );
+ });
+
+ it('should run the query if requesting resource groups', async () => {
+ grafanaRuntime.config.featureToggles.azTemplateVars = true;
+ const ds = createMockDatasource({
+ getSubscriptions: jest.fn().mockResolvedValue([{ text: 'Primary Subscription', value: 'sub' }]),
+ });
+ const onChange = jest.fn();
+ const { rerender } = render( );
+ // wait for initial load
+ await waitFor(() => expect(screen.getByText('Logs')).toBeInTheDocument());
+ // Select RGs variable
+ openMenu(screen.getByLabelText('select query type'));
+ screen.getByText('Resource Groups').click();
+ // Simulate onChange behavior
+ const newQuery = onChange.mock.calls.at(-1)[0];
+ rerender( );
+ await waitFor(() => expect(screen.getByText('Select subscription')).toBeInTheDocument());
+ // Select a subscription
+ openMenu(screen.getByLabelText('select subscription'));
+ screen.getByText('Primary Subscription').click();
+ expect(onChange).toHaveBeenCalledWith(
+ expect.objectContaining({
+ queryType: AzureQueryType.ResourceGroupsQuery,
+ subscription: 'sub',
+ refId: 'A',
+ })
+ );
+ });
+
+ it('should show template variables as options', async () => {
+ const onChange = jest.fn();
+ grafanaRuntime.config.featureToggles.azTemplateVars = true;
+ const ds = createMockDatasource({
+ getSubscriptions: jest.fn().mockResolvedValue([{ text: 'Primary Subscription', value: 'sub' }]),
+ getVariablesRaw: jest.fn().mockReturnValue([
+ { label: 'query0', name: 'sub0' },
+ { label: 'query1', name: 'rg', query: { queryType: AzureQueryType.ResourceGroupsQuery } },
+ ]),
+ });
+ const { rerender } = render( );
+ // wait for initial load
+ await waitFor(() => expect(screen.getByText('Logs')).toBeInTheDocument());
+ // Select RGs variable
+ openMenu(screen.getByLabelText('select query type'));
+ screen.getByText('Resource Groups').click();
+ // Simulate onChange behavior
+ const newQuery = onChange.mock.calls.at(-1)[0];
+ rerender( );
+ await waitFor(() => expect(screen.getByText('Select subscription')).toBeInTheDocument());
+ // Select a subscription
+ openMenu(screen.getByLabelText('select subscription'));
+ await waitFor(() => expect(screen.getByText('Primary Subscription')).toBeInTheDocument());
+ screen.getByText('Template Variables').click();
+ // Simulate onChange behavior
+ const lastQuery = onChange.mock.calls.at(-1)[0];
+ rerender( );
+ await waitFor(() => expect(screen.getByText('query0')).toBeInTheDocument());
+ // Template variables of the same type as the current one should not appear
+ expect(screen.queryByText('query1')).not.toBeInTheDocument();
+ });
+ });
});
diff --git a/public/app/plugins/datasource/grafana-azure-monitor-datasource/components/VariableEditor/VariableEditor.tsx b/public/app/plugins/datasource/grafana-azure-monitor-datasource/components/VariableEditor/VariableEditor.tsx
index 7dca2a9fdae..08cc4aae721 100644
--- a/public/app/plugins/datasource/grafana-azure-monitor-datasource/components/VariableEditor/VariableEditor.tsx
+++ b/public/app/plugins/datasource/grafana-azure-monitor-datasource/components/VariableEditor/VariableEditor.tsx
@@ -1,68 +1,19 @@
-import React, { ChangeEvent, useCallback, useEffect, useState } from 'react';
+import { get } from 'lodash';
+import React, { useEffect, useState } from 'react';
+import { useEffectOnce } from 'react-use';
import { SelectableValue } from '@grafana/data';
-import { Alert, InlineField, Input, Select } from '@grafana/ui';
+import { config } from '@grafana/runtime';
+import { Alert, InlineField, Select } from '@grafana/ui';
import DataSource from '../../datasource';
import { migrateStringQueriesToObjectQueries } from '../../grafanaTemplateVariableFns';
-import { AzureMonitorQuery, AzureQueryType } from '../../types';
+import { AzureMonitorOption, AzureMonitorQuery, AzureQueryType } from '../../types';
import useLastError from '../../utils/useLastError';
import LogsQueryEditor from '../LogsQueryEditor';
import { Space } from '../Space';
-const AZURE_QUERY_VARIABLE_TYPE_OPTIONS = [
- { label: 'Grafana Query Function', value: AzureQueryType.GrafanaTemplateVariableFn },
- { label: 'Logs', value: AzureQueryType.LogAnalytics },
-];
-
-const GrafanaTemplateVariableFnInput = ({
- query,
- updateQuery,
- datasource,
-}: {
- query: AzureMonitorQuery;
- updateQuery: (val: AzureMonitorQuery) => void;
- datasource: DataSource;
-}) => {
- const [inputVal, setInputVal] = useState('');
- useEffect(() => {
- setInputVal(query.grafanaTemplateVariableFn?.rawQuery || '');
- }, [query.grafanaTemplateVariableFn?.rawQuery]);
-
- const onRunQuery = useCallback(
- (newQuery: string) => {
- migrateStringQueriesToObjectQueries(newQuery, { datasource }).then((updatedQuery) => {
- if (updatedQuery.queryType === AzureQueryType.GrafanaTemplateVariableFn) {
- updateQuery(updatedQuery);
- } else {
- updateQuery({
- ...query,
- grafanaTemplateVariableFn: {
- kind: 'UnknownQuery',
- rawQuery: newQuery,
- },
- });
- }
- });
- },
- [datasource, query, updateQuery]
- );
-
- const onChange = (event: ChangeEvent<HTMLInputElement>) => {
- setInputVal(event.target.value);
- };
-
- return (
- <InlineField label="Grafana template variable function">
- <Input value={inputVal} onChange={onChange} onBlur={() => onRunQuery(inputVal)} />
- </InlineField>
- );
-};
+import GrafanaTemplateVariableFnInput from './GrafanaTemplateVariableFn';
type Props = {
query: AzureMonitorQuery | string;
@@ -71,61 +22,103 @@ type Props = {
};
const VariableEditor = (props: Props) => {
- const defaultQuery: AzureMonitorQuery = {
- refId: 'A',
- queryType: AzureQueryType.GrafanaTemplateVariableFn,
- };
- const [query, setQuery] = useState(defaultQuery);
+ const { query, onChange, datasource } = props;
+ const AZURE_QUERY_VARIABLE_TYPE_OPTIONS = [
+ { label: 'Grafana Query Function', value: AzureQueryType.GrafanaTemplateVariableFn },
+ { label: 'Logs', value: AzureQueryType.LogAnalytics },
+ ];
+ if (config.featureToggles.azTemplateVars) {
+ AZURE_QUERY_VARIABLE_TYPE_OPTIONS.push({ label: 'Subscriptions', value: AzureQueryType.SubscriptionsQuery });
+ AZURE_QUERY_VARIABLE_TYPE_OPTIONS.push({ label: 'Resource Groups', value: AzureQueryType.ResourceGroupsQuery });
+ }
+ const [variableOptionGroup, setVariableOptionGroup] = useState<{ label: string; options: AzureMonitorOption[] }>({
+ label: 'Template Variables',
+ options: [],
+ });
+ const [requireSubscription, setRequireSubscription] = useState(false);
+ const [subscriptions, setSubscriptions] = useState<SelectableValue[]>([]);
+ const [errorMessage, setError] = useLastError();
+ const queryType = typeof query === 'string' ? '' : query.queryType;
useEffect(() => {
- migrateStringQueriesToObjectQueries(props.query, { datasource: props.datasource }).then((migratedQuery) => {
- setQuery(migratedQuery);
+ migrateStringQueriesToObjectQueries(query, { datasource: datasource }).then((migratedQuery) => {
+ onChange(migratedQuery);
});
- }, [props.query, props.datasource]);
+ }, [query, datasource, onChange]);
+
+ useEffect(() => {
+ switch (queryType) {
+ case AzureQueryType.ResourceGroupsQuery:
+ setRequireSubscription(true);
+ break;
+ default:
+ setRequireSubscription(false);
+ }
+ }, [queryType]);
+
+ useEffect(() => {
+ const options: AzureMonitorOption[] = [];
+ datasource.getVariablesRaw().forEach((v) => {
+ if (get(v, 'query.queryType') !== queryType) {
+ options.push({ label: v.label || v.name, value: `$${v.name}` });
+ }
+ });
+ setVariableOptionGroup({
+ label: 'Template Variables',
+ options,
+ });
+ }, [datasource, queryType]);
+
+ useEffectOnce(() => {
+ datasource.getSubscriptions().then((subs) => {
+ setSubscriptions(subs.map((s) => ({ label: s.text, value: s.value })));
+ });
+ });
+
+ if (typeof query === 'string') {
+ // still migrating the query
+ return null;
+ }
const onQueryTypeChange = (selectableValue: SelectableValue) => {
if (selectableValue.value) {
- setQuery({
+ onChange({
...query,
queryType: selectableValue.value,
});
}
};
- const onLogsQueryChange = (queryChange: AzureMonitorQuery) => {
- setQuery(queryChange);
- // only hit backend if there's something to query (prevents error when selecting the resource before pinging a query)
- if (queryChange.azureLogAnalytics?.query) {
- props.onChange(queryChange);
+ const onChangeSubscription = (selectableValue: SelectableValue) => {
+ if (selectableValue.value) {
+ onChange({
+ ...query,
+ subscription: selectableValue.value,
+ });
}
};
- const [errorMessage, setError] = useLastError();
-
- const variableOptionGroup = {
- label: 'Template Variables',
- // TODO: figure out a way to filter out the current variable from the variables list
- // options: props.datasource.getVariables().map((v) => ({ label: v, value: v })),
- options: [],
+ const onLogsQueryChange = (queryChange: AzureMonitorQuery) => {
+ onChange(queryChange);
};
return (
<>
-
+
- {query.queryType === AzureQueryType.LogAnalytics && (
+ {typeof query === 'object' && query.queryType === AzureQueryType.LogAnalytics && (
<>
{
)}
>
)}
- {query.queryType === AzureQueryType.GrafanaTemplateVariableFn && (
-
+ {typeof query === 'object' && query.queryType === AzureQueryType.GrafanaTemplateVariableFn && (
+
+ )}
+ {typeof query === 'object' && requireSubscription && (
+
+
+
)}
>
);
diff --git a/public/app/plugins/datasource/grafana-azure-monitor-datasource/datasource.ts b/public/app/plugins/datasource/grafana-azure-monitor-datasource/datasource.ts
index 9906180e042..0366efec881 100644
--- a/public/app/plugins/datasource/grafana-azure-monitor-datasource/datasource.ts
+++ b/public/app/plugins/datasource/grafana-azure-monitor-datasource/datasource.ts
@@ -190,6 +190,10 @@ export default class Datasource extends DataSourceWithBackend
getVariables() {
return this.templateSrv.getVariables().map((v) => `$${v.name}`);
}
+
+ getVariablesRaw() {
+ return this.templateSrv.getVariables();
+ }
}
function hasQueryForType(query: AzureMonitorQuery): boolean {
diff --git a/public/app/plugins/datasource/grafana-azure-monitor-datasource/types/query.ts b/public/app/plugins/datasource/grafana-azure-monitor-datasource/types/query.ts
index 6a65efe6f2d..344de077da8 100644
--- a/public/app/plugins/datasource/grafana-azure-monitor-datasource/types/query.ts
+++ b/public/app/plugins/datasource/grafana-azure-monitor-datasource/types/query.ts
@@ -6,6 +6,9 @@ export enum AzureQueryType {
AzureMonitor = 'Azure Monitor',
LogAnalytics = 'Azure Log Analytics',
AzureResourceGraph = 'Azure Resource Graph',
+ SubscriptionsQuery = 'Azure Subscriptions',
+ ResourceGroupsQuery = 'Azure Resource Groups',
+ /** Deprecated */
GrafanaTemplateVariableFn = 'Grafana Template Variable Function',
}
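
Note: a variable query against the new types is just an AzureMonitorQuery with the matching queryType; minimal sketches mirroring the shapes used in the variables.test.ts changes below:

  const subsQuery: AzureMonitorQuery = { refId: 'A', queryType: AzureQueryType.SubscriptionsQuery };
  const rgQuery: AzureMonitorQuery = { refId: 'A', queryType: AzureQueryType.ResourceGroupsQuery, subscription: 'sub' };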
diff --git a/public/app/plugins/datasource/grafana-azure-monitor-datasource/types/types.ts b/public/app/plugins/datasource/grafana-azure-monitor-datasource/types/types.ts
index 198060b8bbe..286ef80aeff 100644
--- a/public/app/plugins/datasource/grafana-azure-monitor-datasource/types/types.ts
+++ b/public/app/plugins/datasource/grafana-azure-monitor-datasource/types/types.ts
@@ -85,7 +85,7 @@ export interface AzureDataSourceSecureJsonData {
// Represents errors that come back from frontend requests.
// Not totally sure how accurate this type is.
-export type AzureMonitorErrorish = Error;
+export type AzureMonitorErrorish = Error | string | React.ReactElement;
// Azure Monitor API Types
export interface AzureMonitorMetricsMetadataResponse {
diff --git a/public/app/plugins/datasource/grafana-azure-monitor-datasource/utils/messageFromError.ts b/public/app/plugins/datasource/grafana-azure-monitor-datasource/utils/messageFromError.ts
index aa0bb0a61ad..2a7236df003 100644
--- a/public/app/plugins/datasource/grafana-azure-monitor-datasource/utils/messageFromError.ts
+++ b/public/app/plugins/datasource/grafana-azure-monitor-datasource/utils/messageFromError.ts
@@ -1,3 +1,15 @@
+import { isValidElement } from 'react';
+
+import { AzureMonitorErrorish } from '../types';
+
+export function messageFromElement(error: AzureMonitorErrorish): AzureMonitorErrorish | undefined {
+ if (isValidElement(error)) {
+ return error;
+ } else {
+ return messageFromError(error);
+ }
+}
+
export default function messageFromError(error: any): string | undefined {
if (!error || typeof error !== 'object') {
return undefined;
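
Note: the intended dispatch, sketched: React elements pass through untouched so they can be rendered directly, while anything else is funneled into the existing messageFromError string extraction.

  messageFromElement(React.createElement('div', null, 'details')); // returned as-is, a ReactElement
  messageFromElement(new Error('request failed')); // delegated to messageFromError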
diff --git a/public/app/plugins/datasource/grafana-azure-monitor-datasource/utils/migrateQuery.test.ts b/public/app/plugins/datasource/grafana-azure-monitor-datasource/utils/migrateQuery.test.ts
index b9ef2a6f01f..9c047a84b71 100644
--- a/public/app/plugins/datasource/grafana-azure-monitor-datasource/utils/migrateQuery.test.ts
+++ b/public/app/plugins/datasource/grafana-azure-monitor-datasource/utils/migrateQuery.test.ts
@@ -1,6 +1,8 @@
+import React from 'react';
+
import { getTemplateSrv } from '@grafana/runtime';
-import { AzureMetricDimension, AzureMonitorQuery, AzureQueryType } from '../types';
+import { AzureMetricDimension, AzureMonitorErrorish, AzureMonitorQuery, AzureQueryType } from '../types';
import migrateQuery from './migrateQuery';
@@ -17,6 +19,8 @@ jest.mock('@grafana/runtime', () => {
let templateSrv = getTemplateSrv();
+let setErrorMock = jest.fn();
+
const azureMonitorQueryV7 = {
appInsights: { dimension: [], metricName: 'select', timeGrain: 'auto' },
azureLogAnalytics: {
@@ -97,7 +101,7 @@ const modernMetricsQuery: AzureMonitorQuery = {
describe('AzureMonitor: migrateQuery', () => {
it('modern queries should not change', () => {
- const result = migrateQuery(modernMetricsQuery, templateSrv);
+ const result = migrateQuery(modernMetricsQuery, templateSrv, setErrorMock);
// MUST use .toBe because we want to assert that the identity of unmigrated queries remains the same
expect(modernMetricsQuery).toBe(result);
@@ -105,7 +109,7 @@ describe('AzureMonitor: migrateQuery', () => {
describe('migrating from a v7 query to the latest query version', () => {
it('should build a resource uri', () => {
- const result = migrateQuery(azureMonitorQueryV7, templateSrv);
+ const result = migrateQuery(azureMonitorQueryV7, templateSrv, setErrorMock);
expect(result).toMatchObject(
expect.objectContaining({
azureMonitor: expect.objectContaining({
@@ -119,7 +123,7 @@ describe('AzureMonitor: migrateQuery', () => {
describe('migrating from a v8 query to the latest query version', () => {
it('should build a resource uri', () => {
- const result = migrateQuery(azureMonitorQueryV8, templateSrv);
+ const result = migrateQuery(azureMonitorQueryV8, templateSrv, setErrorMock);
expect(result).toMatchObject(
expect.objectContaining({
azureMonitor: expect.objectContaining({
@@ -130,18 +134,66 @@ describe('AzureMonitor: migrateQuery', () => {
);
});
- it('should not build a resource uri with an unsupported template variable', () => {
- replaceMock = jest.fn().mockImplementation((s: string) => s.replace('$ns', 'Microsoft.Storage/storageAccounts'));
+ it('should not build a resource uri with an unsupported namespace template variable', () => {
+ replaceMock = jest
+ .fn()
+ .mockImplementation((s: string) => s.replace('$ns', 'Microsoft.Storage/storageAccounts/tableServices'));
+ setErrorMock = jest
+ .fn()
+ .mockImplementation((errorSource: string, error: AzureMonitorErrorish) => 'Template Var error');
+ const errorElement = React.createElement(
+ 'div',
+ null,
+ `Failed to create resource URI. Validate the metric definition template variable against supported cases `,
+ React.createElement(
+ 'a',
+ {
+ href: 'https://grafana.com/docs/grafana/latest/datasources/azuremonitor/template-variables/',
+ },
+ 'here.'
+ )
+ );
templateSrv = getTemplateSrv();
const query = {
...azureMonitorQueryV8,
azureMonitor: {
- ...azureMonitorQueryV8,
+ ...azureMonitorQueryV8.azureMonitor,
metricDefinition: '$ns',
},
};
- const result = migrateQuery(query, templateSrv);
+ const result = migrateQuery(query, templateSrv, setErrorMock);
+ expect(result.azureMonitor?.resourceUri).toBeUndefined();
+ expect(setErrorMock).toHaveBeenCalledWith('Resource URI migration', errorElement);
+ });
+
+ it('should not build a resource uri with unsupported resource name template variable', () => {
+ replaceMock = jest.fn().mockImplementation((s: string) => s.replace('$resource', 'resource/default'));
+ setErrorMock = jest
+ .fn()
+ .mockImplementation((errorSource: string, error: AzureMonitorErrorish) => 'Template Var error');
+ const errorElement = React.createElement(
+ 'div',
+ null,
+ `Failed to create resource URI. Validate the resource name template variable against supported cases `,
+ React.createElement(
+ 'a',
+ {
+ href: 'https://grafana.com/docs/grafana/latest/datasources/azuremonitor/template-variables/',
+ },
+ 'here.'
+ )
+ );
+ templateSrv = getTemplateSrv();
+ const query = {
+ ...azureMonitorQueryV8,
+ azureMonitor: {
+ ...azureMonitorQueryV8.azureMonitor,
+ resourceName: '$resource',
+ },
+ };
+ const result = migrateQuery(query, templateSrv, setErrorMock);
expect(result.azureMonitor?.resourceUri).toBeUndefined();
+ expect(setErrorMock).toHaveBeenCalledWith('Resource URI migration', errorElement);
});
});
@@ -150,7 +202,11 @@ describe('AzureMonitor: migrateQuery', () => {
const dimensionFilters: AzureMetricDimension[] = [
{ dimension: 'TestDimension', operator: 'eq', filters: ['testFilter'] },
];
- const result = migrateQuery({ ...azureMonitorQueryV8, azureMonitor: { dimensionFilters } }, templateSrv);
+ const result = migrateQuery(
+ { ...azureMonitorQueryV8, azureMonitor: { dimensionFilters } },
+ templateSrv,
+ setErrorMock
+ );
expect(result).toMatchObject(
expect.objectContaining({
azureMonitor: expect.objectContaining({
@@ -161,7 +217,11 @@ describe('AzureMonitor: migrateQuery', () => {
});
it('correctly updates old filter containing wildcard', () => {
const dimensionFilters: AzureMetricDimension[] = [{ dimension: 'TestDimension', operator: 'eq', filter: '*' }];
- const result = migrateQuery({ ...azureMonitorQueryV8, azureMonitor: { dimensionFilters } }, templateSrv);
+ const result = migrateQuery(
+ { ...azureMonitorQueryV8, azureMonitor: { dimensionFilters } },
+ templateSrv,
+ setErrorMock
+ );
expect(result).toMatchObject(
expect.objectContaining({
azureMonitor: expect.objectContaining({
@@ -174,7 +234,11 @@ describe('AzureMonitor: migrateQuery', () => {
});
it('correctly updates old filter containing value', () => {
const dimensionFilters: AzureMetricDimension[] = [{ dimension: 'TestDimension', operator: 'eq', filter: 'test' }];
- const result = migrateQuery({ ...azureMonitorQueryV8, azureMonitor: { dimensionFilters } }, templateSrv);
+ const result = migrateQuery(
+ { ...azureMonitorQueryV8, azureMonitor: { dimensionFilters } },
+ templateSrv,
+ setErrorMock
+ );
expect(result).toMatchObject(
expect.objectContaining({
azureMonitor: expect.objectContaining({
@@ -189,7 +253,11 @@ describe('AzureMonitor: migrateQuery', () => {
const dimensionFilters: AzureMetricDimension[] = [
{ dimension: 'TestDimension', operator: 'eq', filter: '*', filters: ['testFilter'] },
];
- const result = migrateQuery({ ...azureMonitorQueryV8, azureMonitor: { dimensionFilters } }, templateSrv);
+ const result = migrateQuery(
+ { ...azureMonitorQueryV8, azureMonitor: { dimensionFilters } },
+ templateSrv,
+ setErrorMock
+ );
expect(result).toMatchObject(
expect.objectContaining({
azureMonitor: expect.objectContaining({
@@ -208,7 +276,11 @@ describe('AzureMonitor: migrateQuery', () => {
const dimensionFilters: AzureMetricDimension[] = [
{ dimension: 'TestDimension', operator: 'eq', filter: 'testFilter', filters: ['testFilter'] },
];
- const result = migrateQuery({ ...azureMonitorQueryV8, azureMonitor: { dimensionFilters } }, templateSrv);
+ const result = migrateQuery(
+ { ...azureMonitorQueryV8, azureMonitor: { dimensionFilters } },
+ templateSrv,
+ setErrorMock
+ );
expect(result).toMatchObject(
expect.objectContaining({
azureMonitor: expect.objectContaining({
diff --git a/public/app/plugins/datasource/grafana-azure-monitor-datasource/utils/migrateQuery.ts b/public/app/plugins/datasource/grafana-azure-monitor-datasource/utils/migrateQuery.ts
index 54d569302b4..6492f204d27 100644
--- a/public/app/plugins/datasource/grafana-azure-monitor-datasource/utils/migrateQuery.ts
+++ b/public/app/plugins/datasource/grafana-azure-monitor-datasource/utils/migrateQuery.ts
@@ -1,3 +1,5 @@
+import React from 'react';
+
import { TemplateSrv } from '@grafana/runtime';
import UrlBuilder from '../azure_monitor/url_builder';
@@ -7,11 +9,15 @@ import {
setTimeGrain as setMetricsTimeGrain,
} from '../components/MetricsQueryEditor/setQueryValue';
import TimegrainConverter from '../time_grain_converter';
-import { AzureMetricDimension, AzureMonitorQuery, AzureQueryType } from '../types';
+import { AzureMetricDimension, AzureMonitorErrorish, AzureMonitorQuery, AzureQueryType } from '../types';
const OLD_DEFAULT_DROPDOWN_VALUE = 'select';
-export default function migrateQuery(query: AzureMonitorQuery, templateSrv: TemplateSrv): AzureMonitorQuery {
+export default function migrateQuery(
+ query: AzureMonitorQuery,
+ templateSrv: TemplateSrv,
+ setError: (errorSource: string, error: AzureMonitorErrorish) => void
+): AzureMonitorQuery {
let workingQuery = query;
// The old angular controller also had a `migrateApplicationInsightsKeys` migration that
@@ -23,7 +29,7 @@ export default function migrateQuery(query: AzureMonitorQuery, templateSrv: Temp
workingQuery = migrateLogAnalyticsToFromTimes(workingQuery);
workingQuery = migrateToDefaultNamespace(workingQuery);
workingQuery = migrateDimensionToDimensionFilter(workingQuery);
- workingQuery = migrateResourceUri(workingQuery, templateSrv);
+ workingQuery = migrateResourceUri(workingQuery, templateSrv, setError);
workingQuery = migrateDimensionFilterToArray(workingQuery);
return workingQuery;
@@ -98,7 +104,11 @@ function migrateDimensionToDimensionFilter(query: AzureMonitorQuery): AzureMonit
// Azure Monitor metric queries prior to Grafana version 9 did not include a `resourceUri`.
// The resourceUri was previously constructed with the subscription id, resource group,
// metric definition (a.k.a. resource type), and the resource name.
-function migrateResourceUri(query: AzureMonitorQuery, templateSrv: TemplateSrv): AzureMonitorQuery {
+function migrateResourceUri(
+ query: AzureMonitorQuery,
+ templateSrv: TemplateSrv,
+ setError?: (errorSource: string, error: AzureMonitorErrorish) => void
+): AzureMonitorQuery {
const azureMonitorQuery = query.azureMonitor;
if (!azureMonitorQuery || azureMonitorQuery.resourceUri) {
@@ -116,6 +126,23 @@ function migrateResourceUri(query: AzureMonitorQuery, templateSrv: TemplateSrv):
// If a metric definition includes a template variable with a subresource, e.g.
// Microsoft.Storage/storageAccounts/libraries, it's not possible to generate a valid
// resource URI
+ if (setError) {
+ setError(
+ 'Resource URI migration',
+ React.createElement(
+ 'div',
+ null,
+ `Failed to create resource URI. Validate the metric definition template variable against supported cases `,
+ React.createElement(
+ 'a',
+ {
+ href: 'https://grafana.com/docs/grafana/latest/datasources/azuremonitor/template-variables/',
+ },
+ 'here.'
+ )
+ )
+ );
+ }
return query;
}
@@ -123,6 +150,23 @@ function migrateResourceUri(query: AzureMonitorQuery, templateSrv: TemplateSrv):
if (resourceNameArray.some((p) => templateSrv.replace(p).split('/').length > 1)) {
// If a resource name includes a template variable with a subresource, e.g.
// abc123/def456, it's not possible to generate a valid resource URI
+ if (setError) {
+ setError(
+ 'Resource URI migration',
+ React.createElement(
+ 'div',
+ null,
+ `Failed to create resource URI. Validate the resource name template variable against supported cases `,
+ React.createElement(
+ 'a',
+ {
+ href: 'https://grafana.com/docs/grafana/latest/datasources/azuremonitor/template-variables/',
+ },
+ 'here.'
+ )
+ )
+ );
+ }
return query;
}
diff --git a/public/app/plugins/datasource/grafana-azure-monitor-datasource/utils/useLastError.ts b/public/app/plugins/datasource/grafana-azure-monitor-datasource/utils/useLastError.ts
index 8fb0246e1d5..1c26117b2b2 100644
--- a/public/app/plugins/datasource/grafana-azure-monitor-datasource/utils/useLastError.ts
+++ b/public/app/plugins/datasource/grafana-azure-monitor-datasource/utils/useLastError.ts
@@ -2,7 +2,7 @@ import { useState, useCallback, useMemo } from 'react';
import { AzureMonitorErrorish } from '../types';
-import messageFromError from './messageFromError';
+import { messageFromElement } from './messageFromError';
type SourcedError = [string, AzureMonitorErrorish];
@@ -33,7 +33,7 @@ export default function useLastError() {
const errorMessage = useMemo(() => {
const recentError = errors[0];
- return recentError && messageFromError(recentError[1]);
+ return recentError && messageFromElement(recentError[1]);
}, [errors]);
return [errorMessage, addError] as const;
diff --git a/public/app/plugins/datasource/grafana-azure-monitor-datasource/variables.test.ts b/public/app/plugins/datasource/grafana-azure-monitor-datasource/variables.test.ts
index ed5ce12ce7d..12a9bbc60d3 100644
--- a/public/app/plugins/datasource/grafana-azure-monitor-datasource/variables.test.ts
+++ b/public/app/plugins/datasource/grafana-azure-monitor-datasource/variables.test.ts
@@ -513,4 +513,51 @@ describe('VariableSupport', () => {
done();
});
});
+
+ describe('predefined functions', () => {
+ it('can fetch subscriptions', (done) => {
+ const fakeSubscriptions = ['subscriptionId'];
+ const variableSupport = new VariableSupport(
+ createMockDatasource({
+ getSubscriptions: jest.fn().mockResolvedValueOnce(fakeSubscriptions),
+ })
+ );
+ const mockRequest = {
+ targets: [
+ {
+ refId: 'A',
+ queryType: AzureQueryType.SubscriptionsQuery,
+ } as AzureMonitorQuery,
+ ],
+ } as DataQueryRequest<AzureMonitorQuery>;
+ const observables = variableSupport.query(mockRequest);
+ observables.subscribe((result: DataQueryResponseData) => {
+ expect(result.data[0].source).toEqual(fakeSubscriptions);
+ done();
+ });
+ });
+
+ it('can fetch resourceGroups', (done) => {
+ const expectedResults = ['test'];
+ const variableSupport = new VariableSupport(
+ createMockDatasource({
+ getResourceGroups: jest.fn().mockResolvedValueOnce(expectedResults),
+ })
+ );
+ const mockRequest = {
+ targets: [
+ {
+ refId: 'A',
+ queryType: AzureQueryType.ResourceGroupsQuery,
+ subscription: 'sub',
+ } as AzureMonitorQuery,
+ ],
+ } as DataQueryRequest<AzureMonitorQuery>;
+ const observables = variableSupport.query(mockRequest);
+ observables.subscribe((result: DataQueryResponseData) => {
+ expect(result.data[0].source).toEqual(expectedResults);
+ done();
+ });
+ });
+ });
});
diff --git a/public/app/plugins/datasource/grafana-azure-monitor-datasource/variables.ts b/public/app/plugins/datasource/grafana-azure-monitor-datasource/variables.ts
index bf49cad6800..991eca7ae99 100644
--- a/public/app/plugins/datasource/grafana-azure-monitor-datasource/variables.ts
+++ b/public/app/plugins/datasource/grafana-azure-monitor-datasource/variables.ts
@@ -29,18 +29,36 @@ export class VariableSupport extends CustomVariableSupport {
const queryObj = await migrateStringQueriesToObjectQueries(request.targets[0], { datasource: this.datasource });
- if (queryObj.queryType === AzureQueryType.GrafanaTemplateVariableFn && queryObj.grafanaTemplateVariableFn) {
- try {
- const templateVariablesResults = await this.callGrafanaTemplateVariableFn(queryObj.grafanaTemplateVariableFn);
- return {
- data: templateVariablesResults?.length ? [toDataFrame(templateVariablesResults)] : [],
- };
- } catch (err) {
- return { data: [], error: { message: messageFromError(err) } };
+ try {
+ switch (queryObj.queryType) {
+ case AzureQueryType.SubscriptionsQuery:
+ const res = await this.datasource.getSubscriptions();
+ return {
+ data: res?.length ? [toDataFrame(res)] : [],
+ };
+ case AzureQueryType.ResourceGroupsQuery:
+ if (queryObj.subscription) {
+ const rgs = await this.datasource.getResourceGroups(queryObj.subscription);
+ return {
+ data: rgs?.length ? [toDataFrame(rgs)] : [],
+ };
+ }
+ case AzureQueryType.GrafanaTemplateVariableFn:
+ if (queryObj.grafanaTemplateVariableFn) {
+ const templateVariablesResults = await this.callGrafanaTemplateVariableFn(
+ queryObj.grafanaTemplateVariableFn
+ );
+ return {
+ data: templateVariablesResults?.length ? [toDataFrame(templateVariablesResults)] : [],
+ };
+ }
+ default:
+ request.targets[0] = queryObj;
+ return lastValueFrom(this.datasource.query(request));
}
+ } catch (err) {
+ return { data: [], error: { message: messageFromError(err) } };
}
- request.targets[0] = queryObj;
- return lastValueFrom(this.datasource.query(request));
};
return from(promisedResults());
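
Note: every branch now sits inside a single try/catch, so consumers observe failures as an error payload instead of a thrown exception; a sketch of the shape, following the subscribe-based usage in variables.test.ts above:

  variableSupport.query(mockRequest).subscribe((result) => {
    // result.data is [] or [toDataFrame(...)]; failures arrive as result.error.message
  });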
diff --git a/public/app/plugins/datasource/loki/addToQuery.ts b/public/app/plugins/datasource/loki/addToQuery.ts
index a3ce4ff7e77..34b27a6cbe0 100644
--- a/public/app/plugins/datasource/loki/addToQuery.ts
+++ b/public/app/plugins/datasource/loki/addToQuery.ts
@@ -57,6 +57,22 @@ export function addParserToQuery(query: string, parser: string): string {
}
}
+/**
+ * Adds filtering for pipeline errors to an existing query. Useful for query modification for hints.
+ * It uses the LogQL parser to find parsers and adds pipeline error filtering after them.
+ *
+ * @param query
+ */
+export function addNoPipelineErrorToQuery(query: string): string {
+ const parserPositions = getParserPositions(query);
+ if (!parserPositions.length) {
+ return query;
+ }
+
+ const filter = toLabelFilter('__error__', '', '=');
+ return addFilterAsLabelFilter(query, parserPositions, filter);
+}
+
/**
* Parse the string and get all Selector positions in the query together with parsed representation of the
* selector.
@@ -85,7 +101,7 @@ export function getParserPositions(query: string): Position[] {
const positions: Position[] = [];
tree.iterate({
enter: (type, from, to, get): false | void => {
- if (type.name === 'LabelParser') {
+ if (type.name === 'LabelParser' || type.name === 'JsonExpressionParser') {
positions.push({ from, to });
return false;
}
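
Note: end-to-end behavior of the new helper, sketched (the expected strings mirror the tests in the next file):

  addNoPipelineErrorToQuery('{job="app"} | json');    // '{job="app"} | json | __error__=``'
  addNoPipelineErrorToQuery('{job="app"} |= "text"'); // unchanged: no parser, nothing to filter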
diff --git a/public/app/plugins/datasource/loki/addtoQuery.test.ts b/public/app/plugins/datasource/loki/addtoQuery.test.ts
index dc3765bc7a7..4d69467d11a 100644
--- a/public/app/plugins/datasource/loki/addtoQuery.test.ts
+++ b/public/app/plugins/datasource/loki/addtoQuery.test.ts
@@ -1,4 +1,4 @@
-import { addLabelToQuery, addParserToQuery } from './addToQuery';
+import { addLabelToQuery, addNoPipelineErrorToQuery, addParserToQuery } from './addToQuery';
describe('addLabelToQuery()', () => {
it('should add label to simple query', () => {
@@ -177,3 +177,19 @@ describe('addParserToQuery', () => {
});
});
});
+
+describe('addNoPipelineErrorToQuery', () => {
+ it('should add error filtering after logfmt parser', () => {
+ expect(addNoPipelineErrorToQuery('{job="grafana"} | logfmt')).toBe('{job="grafana"} | logfmt | __error__=``');
+ });
+
+ it('should add error filtering after json parser with expressions', () => {
+ expect(addNoPipelineErrorToQuery('{job="grafana"} | json foo="bar", bar="baz"')).toBe(
+ '{job="grafana"} | json foo="bar", bar="baz" | __error__=``'
+ );
+ });
+
+ it('should not add error filtering if no parser', () => {
+ expect(addNoPipelineErrorToQuery('{job="grafana"} |="no parser"')).toBe('{job="grafana"} |="no parser"');
+ });
+});
diff --git a/public/app/plugins/datasource/loki/datasource.ts b/public/app/plugins/datasource/loki/datasource.ts
index 7c90a78b2a7..d1b18db48d1 100644
--- a/public/app/plugins/datasource/loki/datasource.ts
+++ b/public/app/plugins/datasource/loki/datasource.ts
@@ -1,9 +1,7 @@
-// Libraries
import { cloneDeep, map as lodashMap } from 'lodash';
import { lastValueFrom, merge, Observable, of, throwError } from 'rxjs';
import { catchError, map, switchMap } from 'rxjs/operators';
-// Types
import {
AnnotationEvent,
AnnotationQueryRequest,
@@ -45,7 +43,7 @@ import { getTemplateSrv, TemplateSrv } from 'app/features/templating/template_sr
import { serializeParams } from '../../../core/utils/fetch';
import { renderLegendFormat } from '../prometheus/legend';
-import { addLabelToQuery, addParserToQuery } from './addToQuery';
+import { addLabelToQuery, addNoPipelineErrorToQuery, addParserToQuery } from './addToQuery';
import { transformBackendResult } from './backendResultTransformer';
import { LokiAnnotationsQueryEditor } from './components/AnnotationsQueryEditor';
import LanguageProvider from './language_provider';
@@ -359,7 +357,7 @@ export class LokiDatasource
expr: query.expr,
queryType: LokiQueryType.Range,
refId: 'log-samples',
- maxLines: 10,
+ maxLines: 50,
};
// For samples, we use defaultTimeRange (now-6h/now) and a limit of 50 lines so queries are small and fast
@@ -410,6 +408,10 @@ export class LokiDatasource
expression = addParserToQuery(expression, 'json');
break;
}
+ case 'ADD_NO_PIPELINE_ERROR': {
+ expression = addNoPipelineErrorToQuery(expression);
+ break;
+ }
default:
break;
}
diff --git a/public/app/plugins/datasource/loki/queryHints.ts b/public/app/plugins/datasource/loki/queryHints.ts
index 64e02f365da..a31a5dfbdf4 100644
--- a/public/app/plugins/datasource/loki/queryHints.ts
+++ b/public/app/plugins/datasource/loki/queryHints.ts
@@ -1,15 +1,19 @@
import { DataFrame, QueryHint } from '@grafana/data';
-import { isQueryWithParser } from './query_utils';
-import { extractLogParserFromDataFrame } from './responseUtils';
+import { isQueryPipelineErrorFiltering, isQueryWithParser } from './query_utils';
+import { extractHasErrorLabelFromDataFrame, extractLogParserFromDataFrame } from './responseUtils';
export function getQueryHints(query: string, series: DataFrame[]): QueryHint[] {
+ if (series.length === 0) {
+ return [];
+ }
+
const hints: QueryHint[] = [];
- if (series.length > 0) {
- const { hasLogfmt, hasJSON } = extractLogParserFromDataFrame(series[0]);
- const queryWithParser = isQueryWithParser(query);
+ const { queryWithParser, parserCount } = isQueryWithParser(query);
- if (hasJSON && !queryWithParser) {
+ if (!queryWithParser) {
+ const { hasLogfmt, hasJSON } = extractLogParserFromDataFrame(series[0]);
+ if (hasJSON) {
hints.push({
type: 'ADD_JSON_PARSER',
label: 'Selected log stream selector has JSON formatted logs.',
@@ -23,12 +27,12 @@ export function getQueryHints(query: string, series: DataFrame[]): QueryHint[] {
});
}
- if (hasLogfmt && !queryWithParser) {
+ if (hasLogfmt) {
hints.push({
type: 'ADD_LOGFMT_PARSER',
label: 'Selected log stream selector has logfmt formatted logs.',
fix: {
- label: 'Consider using logfmt parser.',
+ label: 'Consider using logfmt parser to turn key-value pairs in your log lines to labels.',
action: {
type: 'ADD_LOGFMT_PARSER',
query,
@@ -38,5 +42,26 @@ export function getQueryHints(query: string, series: DataFrame[]): QueryHint[] {
}
}
+ if (queryWithParser) {
+ // To keep this simple, we only consider the pipeline error filtering hint if the query has at most 1 parser
+ if (parserCount === 1) {
+ const hasPipelineErrorFiltering = isQueryPipelineErrorFiltering(query);
+ const hasError = extractHasErrorLabelFromDataFrame(series[0]);
+ if (hasError && !hasPipelineErrorFiltering) {
+ hints.push({
+ type: 'ADD_NO_PIPELINE_ERROR',
+ label: 'Some logs in your selected log streams have a parsing error.',
+ fix: {
+ label: 'Consider filtering out logs with parsing errors.',
+ action: {
+ type: 'ADD_NO_PIPELINE_ERROR',
+ query,
+ },
+ },
+ });
+ }
+ }
+ }
+
return hints;
}
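
Note: a hint's fix.action.type is the same string matched by the switch added to datasource.ts above (which, assuming the surrounding code, lives in the datasource's modifyQuery method), so applying a hint is just re-dispatching its stored action; hypothetical glue, not part of this PR:

  const applyHint = (hint: QueryHint, query: LokiQuery) => datasource.modifyQuery(query, hint.fix!.action);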
diff --git a/public/app/plugins/datasource/loki/query_utils.test.ts b/public/app/plugins/datasource/loki/query_utils.test.ts
index 8668742b789..34c8425b1dd 100644
--- a/public/app/plugins/datasource/loki/query_utils.test.ts
+++ b/public/app/plugins/datasource/loki/query_utils.test.ts
@@ -137,13 +137,23 @@ describe('isLogsQuery', () => {
describe('isQueryWithParser', () => {
it('returns false if query without parser', () => {
- expect(isQueryWithParser('rate({job="grafana" |= "error" }[5m])')).toBe(false);
+ expect(isQueryWithParser('rate({job="grafana" |= "error" }[5m])')).toEqual({
+ parserCount: 0,
+ queryWithParser: false,
+ });
});
it('returns true if log query with parser', () => {
- expect(isQueryWithParser('{job="grafana"} | json')).toBe(true);
+ expect(isQueryWithParser('{job="grafana"} | json')).toEqual({ parserCount: 1, queryWithParser: true });
});
it('returns true if metric query with parser', () => {
- expect(isQueryWithParser('rate({job="grafana"} | json [5m])')).toBe(true);
+ expect(isQueryWithParser('rate({job="grafana"} | json [5m])')).toEqual({ parserCount: 1, queryWithParser: true });
+ });
+
+ it('returns true if query with json parser with expressions', () => {
+ expect(isQueryWithParser('rate({job="grafana"} | json foo="bar", bar="baz" [5m])')).toEqual({
+ parserCount: 1,
+ queryWithParser: true,
+ });
});
});
diff --git a/public/app/plugins/datasource/loki/query_utils.ts b/public/app/plugins/datasource/loki/query_utils.ts
index 5717053f391..a501b729042 100644
--- a/public/app/plugins/datasource/loki/query_utils.ts
+++ b/public/app/plugins/datasource/loki/query_utils.ts
@@ -121,15 +121,35 @@ export function isLogsQuery(query: string): boolean {
return isLogsQuery;
}
-export function isQueryWithParser(query: string): boolean {
- let hasParser = false;
+export function isQueryWithParser(query: string): { queryWithParser: boolean; parserCount: number } {
+ let parserCount = 0;
const tree = parser.parse(query);
tree.iterate({
enter: (type): false | void => {
- if (type.name === 'LabelParser') {
- hasParser = true;
+ if (type.name === 'LabelParser' || type.name === 'JsonExpressionParser') {
+ parserCount++;
}
},
});
- return hasParser;
+ return { queryWithParser: parserCount > 0, parserCount };
+}
+
+export function isQueryPipelineErrorFiltering(query: string): boolean {
+ let isQueryPipelineErrorFiltering = false;
+ const tree = parser.parse(query);
+ tree.iterate({
+ enter: (type, from, to, get): false | void => {
+ if (type.name === 'LabelFilter') {
+ const label = get().getChild('Matcher')?.getChild('Identifier');
+ if (label) {
+ const labelName = query.substring(label.from, label.to);
+ if (labelName === '__error__') {
+ isQueryPipelineErrorFiltering = true;
+ }
+ }
+ }
+ },
+ });
+
+ return isQueryPipelineErrorFiltering;
}
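
Note: the two predicates side by side, sketched; the counts follow from the Lezer node types matched above:

  isQueryWithParser('{job="app"} | json | logfmt');
  // -> { queryWithParser: true, parserCount: 2 }
  isQueryPipelineErrorFiltering('{job="app"} | logfmt | __error__=``');
  // -> true: the label filter's Matcher identifier is __error__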
diff --git a/public/app/plugins/datasource/loki/querybuilder/LokiQueryModeller.test.ts b/public/app/plugins/datasource/loki/querybuilder/LokiQueryModeller.test.ts
index 9135368d0cd..eef0ce74dca 100644
--- a/public/app/plugins/datasource/loki/querybuilder/LokiQueryModeller.test.ts
+++ b/public/app/plugins/datasource/loki/querybuilder/LokiQueryModeller.test.ts
@@ -22,6 +22,24 @@ describe('LokiQueryModeller', () => {
).toBe('{app="grafana"} | json');
});
+ it('Can query with pipeline operation json and expression param', () => {
+ expect(
+ modeller.renderQuery({
+ labels: [{ label: 'app', op: '=', value: 'grafana' }],
+ operations: [{ id: LokiOperationId.Json, params: ['foo="bar"'] }],
+ })
+ ).toBe('{app="grafana"} | json foo="bar"');
+ });
+
+ it('Can query with pipeline operation json and multiple expression params', () => {
+ expect(
+ modeller.renderQuery({
+ labels: [{ label: 'app', op: '=', value: 'grafana' }],
+ operations: [{ id: LokiOperationId.Json, params: ['foo="bar", bar="baz"'] }],
+ })
+ ).toBe('{app="grafana"} | json foo="bar", bar="baz"');
+ });
+
it('Can query with pipeline operation logfmt', () => {
expect(
modeller.renderQuery({
diff --git a/public/app/plugins/datasource/loki/querybuilder/operations.ts b/public/app/plugins/datasource/loki/querybuilder/operations.ts
index 63e347b4751..632c8c5884f 100644
--- a/public/app/plugins/datasource/loki/querybuilder/operations.ts
+++ b/public/app/plugins/datasource/loki/querybuilder/operations.ts
@@ -64,13 +64,26 @@ export function getOperationDefinitions(): QueryBuilderOperationDef[] {
{
id: LokiOperationId.Json,
name: 'Json',
- params: [],
+ params: [
+ {
+ name: 'Expression',
+ type: 'string',
+ restParam: true,
+ optional: true,
+ minWidth: 18,
+ placeholder: 'server="servers[0]"',
+ description:
+ 'Using expressions with your json parser will extract only the specified json fields as labels. You can specify one or more expressions this way. All expressions must be quoted.',
+ },
+ ],
defaultParams: [],
alternativesKey: 'format',
category: LokiVisualQueryOperationCategory.Formats,
orderRank: LokiOperationOrder.LineFormats,
- renderer: pipelineRenderer,
+ renderer: (model, def, innerExpr) => `${innerExpr} | json ${model.params.join(', ')}`.trim(),
addOperationHandler: addLokiOperation,
+ explainHandler: () =>
+ `This will extract keys and values from a [json](https://grafana.com/docs/loki/latest/logql/log_queries/#json) formatted log line as labels. The extracted labels can be used in label filter expressions and used as values for a range aggregation via the unwrap operation.`,
},
{
id: LokiOperationId.Logfmt,
@@ -288,6 +301,7 @@ export function getOperationDefinitions(): QueryBuilderOperationDef[] {
{ name: 'Value', type: 'string' },
],
defaultParams: ['', '=', ''],
+ alternativesKey: 'label filter',
category: LokiVisualQueryOperationCategory.LabelFilters,
orderRank: LokiOperationOrder.LabelFilters,
renderer: labelFilterRenderer,
@@ -299,6 +313,7 @@ export function getOperationDefinitions(): QueryBuilderOperationDef[] {
name: 'No pipeline errors',
params: [],
defaultParams: [],
+ alternativesKey: 'label filter',
category: LokiVisualQueryOperationCategory.LabelFilters,
orderRank: LokiOperationOrder.NoErrors,
renderer: (model, def, innerExpr) => `${innerExpr} | __error__=\`\``,
@@ -310,6 +325,7 @@ export function getOperationDefinitions(): QueryBuilderOperationDef[] {
name: 'Unwrap',
params: [{ name: 'Identifier', type: 'string', hideName: true, minWidth: 16, placeholder: 'Label key' }],
defaultParams: [''],
+ alternativesKey: 'format',
category: LokiVisualQueryOperationCategory.Formats,
orderRank: LokiOperationOrder.Unwrap,
renderer: (op, def, innerExpr) => `${innerExpr} | unwrap ${op.params[0]}`,
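
Note: the new Json renderer covers both the bare and the expression forms; thanks to the trailing .trim(), an empty params list degrades cleanly. Sketched outputs, assuming innerExpr is the rendered selector:

  // params: []                     -> '{app="grafana"} | json'
  // params: ['foo="bar"', 'a="b"'] -> '{app="grafana"} | json foo="bar", a="b"'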
diff --git a/public/app/plugins/datasource/loki/querybuilder/parsing.test.ts b/public/app/plugins/datasource/loki/querybuilder/parsing.test.ts
index d12dcaa5d41..0c458a533f2 100644
--- a/public/app/plugins/datasource/loki/querybuilder/parsing.test.ts
+++ b/public/app/plugins/datasource/loki/querybuilder/parsing.test.ts
@@ -187,16 +187,20 @@ describe('buildVisualQueryFromString', () => {
);
});
- it('returns error for query with JSON expression parser', () => {
+ it('parses query with JSON parser with expression', () => {
const context = buildVisualQueryFromString('{app="frontend"} | json label="value" ');
- expect(context.errors).toEqual([
- {
- text: 'JsonExpressionParser not supported in visual query builder: json label="value"',
- from: 19,
- to: 37,
- parentType: 'PipelineStage',
- },
- ]);
+ expect(context.query).toEqual({
+ labels: [{ label: 'app', op: '=', value: 'frontend' }],
+ operations: [{ id: 'json', params: ['label="value"'] }],
+ });
+ });
+
+ it('parses query with JSON parser with multiple expressions', () => {
+ const context = buildVisualQueryFromString('{app="frontend"} | json label="value", bar="baz", foo="bar" ');
+ expect(context.query).toEqual({
+ labels: [{ label: 'app', op: '=', value: 'frontend' }],
+ operations: [{ id: 'json', params: ['label="value"', 'bar="baz"', 'foo="bar"'] }],
+ });
});
it('parses query with simple unwrap', () => {
diff --git a/public/app/plugins/datasource/loki/querybuilder/parsing.ts b/public/app/plugins/datasource/loki/querybuilder/parsing.ts
index 469dcb4ccb7..3f9aea6340e 100644
--- a/public/app/plugins/datasource/loki/querybuilder/parsing.ts
+++ b/public/app/plugins/datasource/loki/querybuilder/parsing.ts
@@ -103,12 +103,9 @@ export function handleExpression(expr: string, node: SyntaxNode, context: Contex
}
break;
}
-
case 'JsonExpressionParser': {
- // JsonExpressionParser is not supported in query builder
- const error = 'JsonExpressionParser not supported in visual query builder';
-
- context.errors.push(createNotSupportedError(expr, node, error));
+ visQuery.operations.push(getJsonExpressionParser(expr, node));
+ break;
}
case 'LineFormatExpr': {
@@ -222,6 +219,17 @@ function getLabelParser(expr: string, node: SyntaxNode): QueryBuilderOperation {
};
}
+function getJsonExpressionParser(expr: string, node: SyntaxNode): QueryBuilderOperation {
+ const parserNode = node.getChild('Json');
+ const parser = getString(expr, parserNode);
+
+ const params = [...getAllByType(expr, node, 'JsonExpression')];
+ return {
+ id: parser,
+ params,
+ };
+}
+
function getLabelFilter(expr: string, node: SyntaxNode): { operation?: QueryBuilderOperation; error?: string } {
// Check for nodes not supported in visual builder and return error
if (node.getChild('Or') || node.getChild('And') || node.getChild('Comma')) {
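
Note: round-trip behavior after this change, sketched from the tests above; each JSON expression becomes one param, and the operations renderer shown earlier re-joins them with ', ':

  buildVisualQueryFromString('{app="frontend"} | json label="value", bar="baz"');
  // query.operations -> [{ id: 'json', params: ['label="value"', 'bar="baz"'] }]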
diff --git a/public/app/plugins/datasource/loki/responseUtils.ts b/public/app/plugins/datasource/loki/responseUtils.ts
index e0eb15665af..fc2b7b904e0 100644
--- a/public/app/plugins/datasource/loki/responseUtils.ts
+++ b/public/app/plugins/datasource/loki/responseUtils.ts
@@ -27,3 +27,13 @@ export function extractLogParserFromDataFrame(frame: DataFrame): { hasLogfmt: bo
return { hasLogfmt, hasJSON };
}
+
+export function extractHasErrorLabelFromDataFrame(frame: DataFrame): boolean {
+ const labelField = frame.fields.find((field) => field.name === 'labels' && field.type === FieldType.other);
+ if (labelField == null) {
+ return false;
+ }
+
+ const labels: Array<{ [key: string]: string }> = labelField.values.toArray();
+ return labels.some((label) => label['__error__']);
+}
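
Note: a sketch of the frame shape this helper expects, a 'labels' field of FieldType.other whose values are label objects; any row carrying a non-empty __error__ label flips the result:

  // labelField.values.toArray() (illustrative data):
  // [{ level: 'info' }, { level: 'error', __error__: 'LogfmtParserErr' }]
  // extractHasErrorLabelFromDataFrame(frame) -> true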
diff --git a/public/app/plugins/datasource/mssql/config_ctrl.ts b/public/app/plugins/datasource/mssql/config_ctrl.ts
deleted file mode 100644
index 0bc9df6e95b..00000000000
--- a/public/app/plugins/datasource/mssql/config_ctrl.ts
+++ /dev/null
@@ -1,47 +0,0 @@
-import {
- createChangeHandler,
- createResetHandler,
- PasswordFieldEnum,
-} from '../../../features/datasources/utils/passwordHandlers';
-
-export class MssqlConfigCtrl {
- static templateUrl = 'partials/config.html';
-
- // Set through angular bindings
- declare current: any;
-
- onPasswordReset: ReturnType<typeof createResetHandler>;
- onPasswordChange: ReturnType<typeof createChangeHandler>;
- showUserCredentials = false;
- showTlsConfig = false;
- showCertificateConfig = false;
-
- /** @ngInject */
- constructor($scope: any) {
- this.current = $scope.ctrl.current;
- this.current.jsonData.encrypt = this.current.jsonData.encrypt || 'false';
- this.current.jsonData.sslRootCertFile = this.current.jsonData.sslRootCertFile || '';
- this.current.jsonData.tlsSkipVerify = this.current.jsonData.tlsSkipVerify || false;
- this.current.jsonData.serverName = this.current.jsonData.serverName || '';
- this.current.jsonData.authenticationType = this.current.jsonData.authenticationType || 'SQL Server Authentication';
- this.onPasswordReset = createResetHandler(this, PasswordFieldEnum.Password);
- this.onPasswordChange = createChangeHandler(this, PasswordFieldEnum.Password);
- this.onAuthenticationTypeChange();
- this.onEncryptChange();
- }
-
- onAuthenticationTypeChange() {
- // This is using the fallback in https://github.com/denisenkom/go-mssqldb to use Windows Auth if login/user id is empty.
- if (this.current.jsonData.authenticationType === 'Windows Authentication') {
- this.current.user = '';
- this.current.password = '';
- }
-
- this.showUserCredentials = this.current.jsonData.authenticationType !== 'Windows Authentication';
- }
-
- onEncryptChange() {
- this.showTlsConfig = this.current.jsonData.encrypt === 'true';
- this.showCertificateConfig = this.showTlsConfig && this.current.jsonData.tlsSkipVerify === false;
- }
-}
diff --git a/public/app/plugins/datasource/mssql/configuration/ConfigurationEditor.tsx b/public/app/plugins/datasource/mssql/configuration/ConfigurationEditor.tsx
new file mode 100644
index 00000000000..911b8f60d04
--- /dev/null
+++ b/public/app/plugins/datasource/mssql/configuration/ConfigurationEditor.tsx
@@ -0,0 +1,262 @@
+import { css } from '@emotion/css';
+import React, { SyntheticEvent } from 'react';
+
+import {
+ DataSourcePluginOptionsEditorProps,
+ GrafanaTheme2,
+ onUpdateDatasourceJsonDataOption,
+ onUpdateDatasourceSecureJsonDataOption,
+ SelectableValue,
+ updateDatasourcePluginJsonDataOption,
+ updateDatasourcePluginResetOption,
+} from '@grafana/data';
+import {
+ Alert,
+ FieldSet,
+ InlineField,
+ InlineFieldRow,
+ InlineSwitch,
+ Input,
+ SecretInput,
+ Select,
+ useStyles2,
+} from '@grafana/ui';
+import { ConnectionLimits } from 'app/features/plugins/sql/components/configuration/ConnectionLimits';
+
+import { MSSQLAuthenticationType, MSSQLEncryptOptions, MssqlOptions } from '../types';
+
+export const ConfigurationEditor = (props: DataSourcePluginOptionsEditorProps<MssqlOptions>) => {
+ const { options, onOptionsChange } = props;
+ const styles = useStyles2(getStyles);
+ const jsonData = options.jsonData;
+
+ const onResetPassword = () => {
+ updateDatasourcePluginResetOption(props, 'password');
+ };
+
+ const onDSOptionChanged = (property: keyof MssqlOptions) => {
+ return (event: SyntheticEvent<HTMLInputElement>) => {
+ onOptionsChange({ ...options, ...{ [property]: event.currentTarget.value } });
+ };
+ };
+
+ const onSkipTLSVerifyChanged = (event: SyntheticEvent<HTMLInputElement>) => {
+ updateDatasourcePluginJsonDataOption(props, 'tlsSkipVerify', event.currentTarget.checked);
+ };
+
+ const onEncryptChanged = (value: SelectableValue) => {
+ updateDatasourcePluginJsonDataOption(props, 'encrypt', value.value);
+ };
+
+ const onAuthenticationMethodChanged = (value: SelectableValue) => {
+ onOptionsChange({
+ ...options,
+ ...{
+ jsonData: { ...jsonData, ...{ authenticationType: value.value } },
+ secureJsonData: { ...options.secureJsonData, ...{ password: '' } },
+ secureJsonFields: { ...options.secureJsonFields, ...{ password: false } },
+ user: '',
+ },
+ });
+ };
+
+ const authenticationOptions: Array<SelectableValue<MSSQLAuthenticationType>> = [
+ { value: MSSQLAuthenticationType.sqlAuth, label: 'SQL Server Authentication' },
+ { value: MSSQLAuthenticationType.windowsAuth, label: 'Windows Authentication' },
+ ];
+
+ const encryptOptions: Array<SelectableValue<MSSQLEncryptOptions>> = [
+ { value: MSSQLEncryptOptions.disable, label: 'disable' },
+ { value: MSSQLEncryptOptions.false, label: 'false' },
+ { value: MSSQLEncryptOptions.true, label: 'true' },
+ ];
+
+ const shortWidth = 15;
+ const longWidth = 46;
+ const labelWidthSSL = 25;
+
+ return (
+ <>
+
+
+
+
+
+
+
+
+
+ SQL Server Authentication This is the default mechanism to connect to MS SQL Server. Enter the
+ SQL Server Authentication login or the Windows Authentication login in the DOMAIN\User format.
+
+
+ Windows Authentication Windows Integrated Security - single sign on for users who are already
+ logged onto Windows and have enabled this option for MS SQL Server.
+
+
+ }
+ >
+
+
+ {jsonData.authenticationType === MSSQLAuthenticationType.windowsAuth ? null : (
+
+
+
+
+
+
+
+
+ )}
+
+
+
+
+ Determines whether or to which extent a secure SSL TCP/IP connection will be negotiated with the server.
+
+
+ disable - Data sent between client and server is not encrypted.
+
+
+ false - Data sent between client and server is not encrypted beyond the login packet. (default)
+
+
+ true - Data sent between client and server is encrypted.
+
+
+ If you're using an older version of Microsoft SQL Server, such as 2008 or 2008 R2, you may need to disable
+ encryption to be able to connect.
+ >
+ }
+ label="Encrypt"
+ >
+
+
+
+ {jsonData.encrypt === MSSQLEncryptOptions.true ? (
+ <>
+
+
+
+ {jsonData.tlsSkipVerify ? null : (
+ <>
+
+ Path to file containing the public key certificate of the CA that signed the SQL Server
+ certificate. Needed when the server certificate is self signed.
+
+ }
+ label="TLS/SSL Root Certificate"
+ >
+
+
+
+
+
+ >
+ )}
+ >
+ ) : null}
+
+
+ {
+ updateDatasourcePluginJsonDataOption(props, property, value);
+ }}
+ >
+
+
+
+ A lower limit for the auto group by time interval. Recommended to be set to write frequency, for example
+ 1m
if your data is written every minute.
+
+ }
+ label="Min time interval"
+ >
+
+
+
+
+
+ The database user should only be granted SELECT permissions on the specified database and tables you want to
+ query. Grafana does not validate that queries are safe so queries can contain any SQL statement. For example,
+ statements like USE otherdb;
and DROP TABLE user;
would be executed. To protect
+ against this we highly recommend you create a specific MS SQL user with restricted permissions.
+
+ >
+ );
+};
+
+function getStyles(theme: GrafanaTheme2) {
+ return {
+ ulPadding: css({
+ margin: theme.spacing(1, 0),
+ paddingLeft: theme.spacing(5),
+ }),
+ };
+}
diff --git a/public/app/plugins/datasource/mssql/module.ts b/public/app/plugins/datasource/mssql/module.ts
index 9940f7d30aa..1f09748e344 100644
--- a/public/app/plugins/datasource/mssql/module.ts
+++ b/public/app/plugins/datasource/mssql/module.ts
@@ -2,9 +2,10 @@ import { DataSourcePlugin } from '@grafana/data';
import { SqlQueryEditor } from 'app/features/plugins/sql/components/QueryEditor';
import { SQLQuery } from 'app/features/plugins/sql/types';
-import { MssqlConfigCtrl } from './config_ctrl';
+import { ConfigurationEditor } from './configuration/ConfigurationEditor';
import { MssqlDatasource } from './datasource';
+import { MssqlOptions } from './types';
-export const plugin = new DataSourcePlugin(MssqlDatasource)
+export const plugin = new DataSourcePlugin<MssqlDatasource, SQLQuery, MssqlOptions>(MssqlDatasource)
.setQueryEditor(SqlQueryEditor)
- .setConfigCtrl(MssqlConfigCtrl);
+ .setConfigEditor(ConfigurationEditor);
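
For reference, setConfigEditor expects a React component whose props are DataSourcePluginOptionsEditorProps parameterized with the same options type handed to DataSourcePlugin — that is why MssqlOptions is now imported here. A stripped-down sketch of a compatible editor (hypothetical component, not the one in this diff):

    import React from 'react';

    import { DataSourcePluginOptionsEditorProps } from '@grafana/data';

    import { MssqlOptions } from './types';

    // If the options generic here drifted from the third type parameter on
    // DataSourcePlugin, .setConfigEditor(...) above would fail to type-check.
    export const MinimalConfigEditor = ({
      options,
      onOptionsChange,
    }: DataSourcePluginOptionsEditorProps<MssqlOptions>) => (
      <input
        value={options.jsonData.database ?? ''}
        onChange={(e) =>
          onOptionsChange({ ...options, jsonData: { ...options.jsonData, database: e.currentTarget.value } })
        }
      />
    );
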
diff --git a/public/app/plugins/datasource/mssql/partials/config.html b/public/app/plugins/datasource/mssql/partials/config.html
deleted file mode 100644
index 680ba4dd75b..00000000000
--- a/public/app/plugins/datasource/mssql/partials/config.html
+++ /dev/null
@@ -1,161 +0,0 @@
-MS SQL connection
-
-
-
-TLS/SSL Auth
-
-
-
-Connection limits
-
-
-
-MS SQL details
-
-
-
-
diff --git a/public/app/plugins/datasource/mssql/types.ts b/public/app/plugins/datasource/mssql/types.ts
new file mode 100644
index 00000000000..bdf6e44746a
--- /dev/null
+++ b/public/app/plugins/datasource/mssql/types.ts
@@ -0,0 +1,24 @@
+import { DataSourceJsonData } from '@grafana/data';
+import { SQLConnectionLimits } from 'app/features/plugins/sql/components/configuration/types';
+
+export enum MSSQLAuthenticationType {
+ sqlAuth = 'SQL Server Authentication',
+ windowsAuth = 'Windows Authentication',
+}
+
+export enum MSSQLEncryptOptions {
+ disable = 'disable',
+ false = 'false',
+ true = 'true',
+}
+export interface MssqlOptions extends DataSourceJsonData, SQLConnectionLimits {
+ authenticationType: MSSQLAuthenticationType;
+ encrypt: MSSQLEncryptOptions;
+ serverName: string;
+ sslRootCertFile: string;
+ tlsSkipVerify: boolean;
+ url: string;
+ database: string;
+ timeInterval: string;
+ user: string;
+}
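
Both enums carry their wire values as strings, so defaulting is a plain property lookup. A hedged sketch of how a consumer might normalize a sparse jsonData blob — the helper is illustrative, and the fallbacks assume the defaults the tooltips above describe (SQL Server Authentication, encrypt "false"):

    import { MSSQLAuthenticationType, MSSQLEncryptOptions, MssqlOptions } from './types';

    // Assumed defaults: SQL authentication and encrypt="false" mirror what the
    // tooltips in the configuration editor call the default behavior.
    export function withAuthDefaults(jsonData: Partial<MssqlOptions>) {
      return {
        authenticationType: jsonData.authenticationType ?? MSSQLAuthenticationType.sqlAuth,
        encrypt: jsonData.encrypt ?? MSSQLEncryptOptions.false,
      };
    }
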
diff --git a/public/app/plugins/datasource/mysql/configuration/ConfigurationEditor.tsx b/public/app/plugins/datasource/mysql/configuration/ConfigurationEditor.tsx
new file mode 100644
index 00000000000..e26efcfc1ba
--- /dev/null
+++ b/public/app/plugins/datasource/mysql/configuration/ConfigurationEditor.tsx
@@ -0,0 +1,181 @@
+import React, { SyntheticEvent } from 'react';
+
+import {
+ DataSourcePluginOptionsEditorProps,
+ onUpdateDatasourceJsonDataOption,
+ onUpdateDatasourceSecureJsonDataOption,
+ updateDatasourcePluginJsonDataOption,
+ updateDatasourcePluginResetOption,
+} from '@grafana/data';
+import { Alert, FieldSet, InlineField, InlineFieldRow, InlineSwitch, Input, Link, SecretInput } from '@grafana/ui';
+import { ConnectionLimits } from 'app/features/plugins/sql/components/configuration/ConnectionLimits';
+import { TLSSecretsConfig } from 'app/features/plugins/sql/components/configuration/TLSSecretsConfig';
+
+import { MySQLOptions } from '../types';
+
+export const ConfigurationEditor = (props: DataSourcePluginOptionsEditorProps) => {
+ const { options, onOptionsChange } = props;
+ const jsonData = options.jsonData;
+
+ const onResetPassword = () => {
+ updateDatasourcePluginResetOption(props, 'password');
+ };
+
+ const onDSOptionChanged = (property: keyof MySQLOptions) => {
+    return (event: SyntheticEvent<HTMLInputElement>) => {
+ onOptionsChange({ ...options, ...{ [property]: event.currentTarget.value } });
+ };
+ };
+
+ const onSwitchChanged = (property: keyof MySQLOptions) => {
+    return (event: SyntheticEvent<HTMLInputElement>) => {
+ updateDatasourcePluginJsonDataOption(props, property, event.currentTarget.checked);
+ };
+ };
+
+ const mediumWidth = 20;
+ const shortWidth = 15;
+ const longWidth = 40;
+
+ return (
+ <>
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+            Specify the time zone used in the database session, e.g. <code>Europe/Berlin</code> or <code>+02:00</code>.
+            This is necessary if the timezone of the database (or the host of the database) is set to something other
+            than UTC. The value is set in the session with <code>SET time_zone='...'</code>. If you leave this field
+            empty, the timezone is not updated. You can find more information in the MySQL documentation.
+
+ }
+ label="Session timezone"
+ labelWidth={mediumWidth}
+ >
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+ {options.jsonData.tlsAuth ? (
+
+
+
+ ) : null}
+
+ {
+ updateDatasourcePluginJsonDataOption(props, property, value);
+ }}
+ >
+
+
+
+            A lower limit for the auto group by time interval. Recommended to be set to write frequency, for example{' '}
+            <code>1m</code> if your data is written every minute.
+
+ }
+ labelWidth={mediumWidth}
+ label="Min time interval"
+ >
+
+
+
+
+
+          The database user should only be granted SELECT permissions on the specified database and tables you want to
+          query. Grafana does not validate that queries are safe so queries can contain any SQL statement. For example,
+          statements like <code>USE otherdb;</code> and <code>DROP TABLE user;</code> would be executed. To protect
+          against this we highly recommend you create a specific MySQL user with restricted permissions. Check out the{' '}
+          <Link rel="noreferrer" target="_blank" href="https://grafana.com/docs/grafana/latest/datasources/mysql/">
+            MySQL Data Source Docs
+          </Link>{' '}
+          for more information.
+
+    </>
+ );
+};
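
The password handling above replaces the old createResetHandler/createChangeHandler pair: updateDatasourcePluginResetOption clears the stored secret and flips secureJsonFields.password, which flips the SecretInput back into its editable state. A minimal sketch of that wiring, using only helpers this file already imports (the PasswordField wrapper itself is illustrative):

    import React from 'react';

    import {
      DataSourcePluginOptionsEditorProps,
      onUpdateDatasourceSecureJsonDataOption,
      updateDatasourcePluginResetOption,
    } from '@grafana/data';
    import { SecretInput } from '@grafana/ui';

    import { MySQLOptions } from '../types';

    // isConfigured drives the masked "configured" rendering; onReset clears the
    // server-side secret; onChange stages a new value in secureJsonData.
    export const PasswordField = (props: DataSourcePluginOptionsEditorProps<MySQLOptions>) => (
      <SecretInput
        isConfigured={Boolean(props.options.secureJsonFields?.password)}
        onReset={() => updateDatasourcePluginResetOption(props, 'password')}
        onChange={onUpdateDatasourceSecureJsonDataOption(props, 'password')}
      />
    );
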
diff --git a/public/app/plugins/datasource/mysql/module.ts b/public/app/plugins/datasource/mysql/module.ts
index e412a473cc5..7044b6dd35b 100644
--- a/public/app/plugins/datasource/mysql/module.ts
+++ b/public/app/plugins/datasource/mysql/module.ts
@@ -1,27 +1,10 @@
import { DataSourcePlugin } from '@grafana/data';
-import {
- createChangeHandler,
- createResetHandler,
- PasswordFieldEnum,
-} from '../../../features/datasources/utils/passwordHandlers';
-
+import { ConfigurationEditor } from './configuration/ConfigurationEditor';
import { MysqlDatasource } from './datasource';
import { MysqlQueryCtrl } from './query_ctrl';
import { MySQLQuery } from './types';
-class MysqlConfigCtrl {
- static templateUrl = 'partials/config.html';
- current: any;
-  onPasswordReset: ReturnType<typeof createResetHandler>;
-  onPasswordChange: ReturnType<typeof createChangeHandler>;
-
- constructor() {
- this.onPasswordReset = createResetHandler(this, PasswordFieldEnum.Password);
- this.onPasswordChange = createChangeHandler(this, PasswordFieldEnum.Password);
- }
-}
-
const defaultQuery = `SELECT
    UNIX_TIMESTAMP(<time_column>) as time_sec,
    <text_column> as text,
@@ -48,11 +31,10 @@ export {
MysqlDatasource,
MysqlDatasource as Datasource,
MysqlQueryCtrl as QueryCtrl,
- MysqlConfigCtrl as ConfigCtrl,
MysqlAnnotationsQueryCtrl as AnnotationsQueryCtrl,
};
export const plugin = new DataSourcePlugin(MysqlDatasource)
.setQueryCtrl(MysqlQueryCtrl)
- .setConfigCtrl(MysqlConfigCtrl)
+ .setConfigEditor(ConfigurationEditor)
.setAnnotationQueryCtrl(MysqlAnnotationsQueryCtrl);
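
This module change is the whole migration recipe in miniature: delete the Angular ConfigCtrl class along with its partials/config.html template, and register a typed React editor instead. Sketched for a hypothetical plugin (MyDatasource, MyQuery, and MyOptions are placeholder names, not from this diff):

    import { DataSourcePlugin } from '@grafana/data';

    import { ConfigurationEditor } from './configuration/ConfigurationEditor';
    import { MyDatasource } from './datasource';
    import { MyOptions, MyQuery } from './types';

    // Before: .setConfigCtrl(AngularConfigCtrl) backed by partials/config.html.
    // After: a React component registered through setConfigEditor, typed by the
    // same options generic the plugin declares.
    export const plugin = new DataSourcePlugin<MyDatasource, MyQuery, MyOptions>(MyDatasource).setConfigEditor(
      ConfigurationEditor
    );
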
diff --git a/public/app/plugins/datasource/mysql/partials/config.html b/public/app/plugins/datasource/mysql/partials/config.html
deleted file mode 100644
index c8d8afbc64f..00000000000
--- a/public/app/plugins/datasource/mysql/partials/config.html
+++ /dev/null
@@ -1,132 +0,0 @@
-MySQL Connection
-
-