From ca60bf298c5f8369d2c520246237c3bc93f1aa24 Mon Sep 17 00:00:00 2001 From: Dustin Hooten Date: Mon, 11 Nov 2019 14:42:24 -0700 Subject: [PATCH] React UI: Implement /targets page (#6276) * Add LastScrapeDuration to targets endpoint Signed-off-by: Dustin Hooten * Add Scrape job name to targets endpoint Signed-off-by: Dustin Hooten * Implement the /targets page in react Signed-off-by: Dustin Hooten * Add state query param to targets endpoint Signed-off-by: Dustin Hooten * Use state filter in api call Signed-off-by: Dustin Hooten * api feedback Signed-off-by: Dustin Hooten * pr feedback frontend Signed-off-by: Dustin Hooten * Implement and use localstorage hook Signed-off-by: Dustin Hooten * PR feedback Signed-off-by: Dustin Hooten --- docs/querying/api.md | 39 +++- scrape/scrape.go | 4 +- scrape/scrape_test.go | 2 +- scrape/target.go | 3 +- web/api/v1/api.go | 80 ++++--- web/api/v1/api_test.go | 166 +++++++++++--- .../src/hooks/useLocalStorage.test.tsx | 27 +++ .../react-app/src/hooks/useLocalStorage.tsx | 13 ++ web/ui/react-app/src/pages/Targets.tsx | 15 -- web/ui/react-app/src/pages/index.ts | 2 +- .../src/pages/targets/EndpointLink.test.tsx | 36 +++ .../src/pages/targets/EndpointLink.tsx | 38 ++++ .../src/pages/targets/Filter.module.css | 4 + .../src/pages/targets/Filter.test.tsx | 45 ++++ web/ui/react-app/src/pages/targets/Filter.tsx | 39 ++++ .../src/pages/targets/ScrapePoolList.test.tsx | 98 ++++++++ .../src/pages/targets/ScrapePoolList.tsx | 55 +++++ .../pages/targets/ScrapePoolPanel.module.css | 43 ++++ .../pages/targets/ScrapePoolPanel.test.tsx | 142 ++++++++++++ .../src/pages/targets/ScrapePoolPanel.tsx | 95 ++++++++ .../src/pages/targets/TargetLabels.module.css | 3 + .../src/pages/targets/TargetLabels.test.tsx | 50 ++++ .../src/pages/targets/TargetLabels.tsx | 48 ++++ .../src/pages/targets/Targets.test.tsx | 33 +++ .../react-app/src/pages/targets/Targets.tsx | 22 ++ .../__snapshots__/TargetLabels.test.tsx.snap | 81 +++++++ 
.../pages/targets/__testdata__/testdata.ts | 215 ++++++++++++++++++ .../src/pages/targets/target.test.ts | 45 ++++ web/ui/react-app/src/pages/targets/target.ts | 49 ++++ web/ui/react-app/src/utils/timeFormat.test.ts | 47 +++- web/ui/react-app/src/utils/timeFormat.ts | 37 +++ 31 files changed, 1499 insertions(+), 77 deletions(-) create mode 100644 web/ui/react-app/src/hooks/useLocalStorage.test.tsx create mode 100644 web/ui/react-app/src/hooks/useLocalStorage.tsx delete mode 100644 web/ui/react-app/src/pages/Targets.tsx create mode 100644 web/ui/react-app/src/pages/targets/EndpointLink.test.tsx create mode 100644 web/ui/react-app/src/pages/targets/EndpointLink.tsx create mode 100644 web/ui/react-app/src/pages/targets/Filter.module.css create mode 100644 web/ui/react-app/src/pages/targets/Filter.test.tsx create mode 100644 web/ui/react-app/src/pages/targets/Filter.tsx create mode 100644 web/ui/react-app/src/pages/targets/ScrapePoolList.test.tsx create mode 100644 web/ui/react-app/src/pages/targets/ScrapePoolList.tsx create mode 100644 web/ui/react-app/src/pages/targets/ScrapePoolPanel.module.css create mode 100644 web/ui/react-app/src/pages/targets/ScrapePoolPanel.test.tsx create mode 100644 web/ui/react-app/src/pages/targets/ScrapePoolPanel.tsx create mode 100644 web/ui/react-app/src/pages/targets/TargetLabels.module.css create mode 100644 web/ui/react-app/src/pages/targets/TargetLabels.test.tsx create mode 100644 web/ui/react-app/src/pages/targets/TargetLabels.tsx create mode 100644 web/ui/react-app/src/pages/targets/Targets.test.tsx create mode 100644 web/ui/react-app/src/pages/targets/Targets.tsx create mode 100644 web/ui/react-app/src/pages/targets/__snapshots__/TargetLabels.test.tsx.snap create mode 100644 web/ui/react-app/src/pages/targets/__testdata__/testdata.ts create mode 100644 web/ui/react-app/src/pages/targets/target.test.ts create mode 100644 web/ui/react-app/src/pages/targets/target.ts diff --git a/docs/querying/api.md b/docs/querying/api.md index 
18da0bd294..76e9e231f0 100644 --- a/docs/querying/api.md +++ b/docs/querying/api.md @@ -390,7 +390,7 @@ Prometheus target discovery: GET /api/v1/targets ``` -Both the active and dropped targets are part of the response. +Both the active and dropped targets are part of the response by default. `labels` represents the label set after relabelling has occurred. `discoveredLabels` represent the unmodified labels retrieved during service discovery before relabelling has occurred. @@ -411,9 +411,11 @@ $ curl http://localhost:9090/api/v1/targets "instance": "127.0.0.1:9090", "job": "prometheus" }, + "scrapePool": "prometheus", "scrapeUrl": "http://127.0.0.1:9090/metrics", "lastError": "", "lastScrape": "2017-01-17T15:07:44.723715405+01:00", + "lastScrapeDuration": 0.050688943, "health": "up" } ], @@ -431,6 +433,41 @@ $ curl http://localhost:9090/api/v1/targets } ``` +The `state` query parameter allows the caller to filter by active or dropped targets, +(e.g., `state=active`, `state=dropped`, `state=any`). +Note that an empty array is still returned for targets that are filtered out. +Other values are ignored. 
+ +```json +$ curl 'http://localhost:9090/api/v1/targets?state=active' +{ + "status": "success", + "data": { + "activeTargets": [ + { + "discoveredLabels": { + "__address__": "127.0.0.1:9090", + "__metrics_path__": "/metrics", + "__scheme__": "http", + "job": "prometheus" + }, + "labels": { + "instance": "127.0.0.1:9090", + "job": "prometheus" + }, + "scrapePool": "prometheus", + "scrapeUrl": "http://127.0.0.1:9090/metrics", + "lastError": "", + "lastScrape": "2017-01-17T15:07:44.723715405+01:00", + "lastScrapeDuration": 0.050688943, + "health": "up" + } + ], + "droppedTargets": [] + } +} +``` + ## Rules diff --git a/scrape/scrape.go b/scrape/scrape.go index b85309cd0d..7905f2644b 100644 --- a/scrape/scrape.go +++ b/scrape/scrape.go @@ -499,7 +499,7 @@ func appender(app storage.Appender, limit int) storage.Appender { // A scraper retrieves samples and accepts a status report at the end. type scraper interface { scrape(ctx context.Context, w io.Writer) (string, error) - report(start time.Time, dur time.Duration, err error) + Report(start time.Time, dur time.Duration, err error) offset(interval time.Duration, jitterSeed uint64) time.Duration } @@ -1212,7 +1212,7 @@ const ( ) func (sl *scrapeLoop) report(start time.Time, duration time.Duration, scraped, appended, seriesAdded int, err error) error { - sl.scraper.report(start, duration, err) + sl.scraper.Report(start, duration, err) ts := timestamp.FromTime(start) diff --git a/scrape/scrape_test.go b/scrape/scrape_test.go index d872556433..02ddca84d9 100644 --- a/scrape/scrape_test.go +++ b/scrape/scrape_test.go @@ -1450,7 +1450,7 @@ func (ts *testScraper) offset(interval time.Duration, jitterSeed uint64) time.Du return ts.offsetDur } -func (ts *testScraper) report(start time.Time, duration time.Duration, err error) { +func (ts *testScraper) Report(start time.Time, duration time.Duration, err error) { ts.lastStart = start ts.lastDuration = duration ts.lastError = err diff --git a/scrape/target.go b/scrape/target.go index 
0a264d5519..d3a6a379e5 100644 --- a/scrape/target.go +++ b/scrape/target.go @@ -200,7 +200,8 @@ func (t *Target) URL() *url.URL { } } -func (t *Target) report(start time.Time, dur time.Duration, err error) { +// Report sets target data about the last scrape. +func (t *Target) Report(start time.Time, dur time.Duration, err error) { t.mtx.Lock() defer t.mtx.Unlock() diff --git a/web/api/v1/api.go b/web/api/v1/api.go index 61865d70cc..0f33d33b65 100644 --- a/web/api/v1/api.go +++ b/web/api/v1/api.go @@ -25,6 +25,7 @@ import ( "regexp" "sort" "strconv" + "strings" "time" "unsafe" @@ -562,11 +563,13 @@ type Target struct { // Any labels that are added to this target and its metrics. Labels map[string]string `json:"labels"` - ScrapeURL string `json:"scrapeUrl"` + ScrapePool string `json:"scrapePool"` + ScrapeURL string `json:"scrapeUrl"` - LastError string `json:"lastError"` - LastScrape time.Time `json:"lastScrape"` - Health scrape.TargetHealth `json:"health"` + LastError string `json:"lastError"` + LastScrape time.Time `json:"lastScrape"` + LastScrapeDuration float64 `json:"lastScrapeDuration"` + Health scrape.TargetHealth `json:"health"` } // DroppedTarget has the information for one target that was dropped during relabelling. @@ -582,7 +585,7 @@ type TargetDiscovery struct { } func (api *API) targets(r *http.Request) apiFuncResult { - flatten := func(targets map[string][]*scrape.Target) []*scrape.Target { + sortKeys := func(targets map[string][]*scrape.Target) ([]string, int) { var n int keys := make([]string, 0, len(targets)) for k := range targets { @@ -590,6 +593,11 @@ func (api *API) targets(r *http.Request) apiFuncResult { n += len(targets[k]) } sort.Strings(keys) + return keys, n + } + + flatten := func(targets map[string][]*scrape.Target) []*scrape.Target { + keys, n := sortKeys(targets) res := make([]*scrape.Target, 0, n) for _, k := range keys { res = append(res, targets[k]...) 
@@ -597,31 +605,49 @@ func (api *API) targets(r *http.Request) apiFuncResult { return res } - tActive := flatten(api.targetRetriever.TargetsActive()) - tDropped := flatten(api.targetRetriever.TargetsDropped()) - res := &TargetDiscovery{ActiveTargets: make([]*Target, 0, len(tActive)), DroppedTargets: make([]*DroppedTarget, 0, len(tDropped))} + state := strings.ToLower(r.URL.Query().Get("state")) + showActive := state == "" || state == "any" || state == "active" + showDropped := state == "" || state == "any" || state == "dropped" + res := &TargetDiscovery{} - for _, target := range tActive { - lastErrStr := "" - lastErr := target.LastError() - if lastErr != nil { - lastErrStr = lastErr.Error() - } + if showActive { + targetsActive := api.targetRetriever.TargetsActive() + activeKeys, numTargets := sortKeys(targetsActive) + res.ActiveTargets = make([]*Target, 0, numTargets) - res.ActiveTargets = append(res.ActiveTargets, &Target{ - DiscoveredLabels: target.DiscoveredLabels().Map(), - Labels: target.Labels().Map(), - ScrapeURL: target.URL().String(), - LastError: lastErrStr, - LastScrape: target.LastScrape(), - Health: target.Health(), - }) - } + for _, key := range activeKeys { + for _, target := range targetsActive[key] { + lastErrStr := "" + lastErr := target.LastError() + if lastErr != nil { + lastErrStr = lastErr.Error() + } - for _, t := range tDropped { - res.DroppedTargets = append(res.DroppedTargets, &DroppedTarget{ - DiscoveredLabels: t.DiscoveredLabels().Map(), - }) + res.ActiveTargets = append(res.ActiveTargets, &Target{ + DiscoveredLabels: target.DiscoveredLabels().Map(), + Labels: target.Labels().Map(), + ScrapePool: key, + ScrapeURL: target.URL().String(), + LastError: lastErrStr, + LastScrape: target.LastScrape(), + LastScrapeDuration: target.LastScrapeDuration().Seconds(), + Health: target.Health(), + }) + } + } + } else { + res.ActiveTargets = []*Target{} + } + if showDropped { + tDropped := flatten(api.targetRetriever.TargetsDropped()) + 
res.DroppedTargets = make([]*DroppedTarget, 0, len(tDropped)) + for _, t := range tDropped { + res.DroppedTargets = append(res.DroppedTargets, &DroppedTarget{ + DiscoveredLabels: t.DiscoveredLabels().Map(), + }) + } + } else { + res.DroppedTargets = []*DroppedTarget{} } return apiFuncResult{res, nil, nil, nil} } diff --git a/web/api/v1/api_test.go b/web/api/v1/api_test.go index 1c155d703b..78160e388c 100644 --- a/web/api/v1/api_test.go +++ b/web/api/v1/api_test.go @@ -57,32 +57,36 @@ import ( type testTargetRetriever struct{} +var ( + scrapeStart = time.Now().Add(-11 * time.Second) +) + func (t testTargetRetriever) TargetsActive() map[string][]*scrape.Target { + testTarget := scrape.NewTarget( + labels.FromMap(map[string]string{ + model.SchemeLabel: "http", + model.AddressLabel: "example.com:8080", + model.MetricsPathLabel: "/metrics", + model.JobLabel: "test", + }), + nil, + url.Values{}, + ) + testTarget.Report(scrapeStart, 70*time.Millisecond, nil) + blackboxTarget := scrape.NewTarget( + labels.FromMap(map[string]string{ + model.SchemeLabel: "http", + model.AddressLabel: "localhost:9115", + model.MetricsPathLabel: "/probe", + model.JobLabel: "blackbox", + }), + nil, + url.Values{"target": []string{"example.com"}}, + ) + blackboxTarget.Report(scrapeStart, 100*time.Millisecond, errors.New("failed")) return map[string][]*scrape.Target{ - "test": { - scrape.NewTarget( - labels.FromMap(map[string]string{ - model.SchemeLabel: "http", - model.AddressLabel: "example.com:8080", - model.MetricsPathLabel: "/metrics", - model.JobLabel: "test", - }), - nil, - url.Values{}, - ), - }, - "blackbox": { - scrape.NewTarget( - labels.FromMap(map[string]string{ - model.SchemeLabel: "http", - model.AddressLabel: "localhost:9115", - model.MetricsPathLabel: "/probe", - model.JobLabel: "blackbox", - }), - nil, - url.Values{"target": []string{"example.com"}}, - ), - }, + "test": {testTarget}, + "blackbox": {blackboxTarget}, } } func (t testTargetRetriever) TargetsDropped() 
map[string][]*scrape.Target { @@ -699,18 +703,124 @@ func testEndpoints(t *testing.T, api *API, testLabelAPI bool) { Labels: map[string]string{ "job": "blackbox", }, - ScrapeURL: "http://localhost:9115/probe?target=example.com", - Health: "unknown", + ScrapePool: "blackbox", + ScrapeURL: "http://localhost:9115/probe?target=example.com", + Health: "down", + LastError: "failed", + LastScrape: scrapeStart, + LastScrapeDuration: 0.1, }, { DiscoveredLabels: map[string]string{}, Labels: map[string]string{ "job": "test", }, - ScrapeURL: "http://example.com:8080/metrics", - Health: "unknown", + ScrapePool: "test", + ScrapeURL: "http://example.com:8080/metrics", + Health: "up", + LastError: "", + LastScrape: scrapeStart, + LastScrapeDuration: 0.07, + }, + }, + DroppedTargets: []*DroppedTarget{ + { + DiscoveredLabels: map[string]string{ + "__address__": "http://dropped.example.com:9115", + "__metrics_path__": "/probe", + "__scheme__": "http", + "job": "blackbox", + }, }, }, + }, + }, + { + endpoint: api.targets, + query: url.Values{ + "state": []string{"any"}, + }, + response: &TargetDiscovery{ + ActiveTargets: []*Target{ + { + DiscoveredLabels: map[string]string{}, + Labels: map[string]string{ + "job": "blackbox", + }, + ScrapePool: "blackbox", + ScrapeURL: "http://localhost:9115/probe?target=example.com", + Health: "down", + LastError: "failed", + LastScrape: scrapeStart, + LastScrapeDuration: 0.1, + }, + { + DiscoveredLabels: map[string]string{}, + Labels: map[string]string{ + "job": "test", + }, + ScrapePool: "test", + ScrapeURL: "http://example.com:8080/metrics", + Health: "up", + LastError: "", + LastScrape: scrapeStart, + LastScrapeDuration: 0.07, + }, + }, + DroppedTargets: []*DroppedTarget{ + { + DiscoveredLabels: map[string]string{ + "__address__": "http://dropped.example.com:9115", + "__metrics_path__": "/probe", + "__scheme__": "http", + "job": "blackbox", + }, + }, + }, + }, + }, + { + endpoint: api.targets, + query: url.Values{ + "state": []string{"active"}, + 
}, + response: &TargetDiscovery{ + ActiveTargets: []*Target{ + { + DiscoveredLabels: map[string]string{}, + Labels: map[string]string{ + "job": "blackbox", + }, + ScrapePool: "blackbox", + ScrapeURL: "http://localhost:9115/probe?target=example.com", + Health: "down", + LastError: "failed", + LastScrape: scrapeStart, + LastScrapeDuration: 0.1, + }, + { + DiscoveredLabels: map[string]string{}, + Labels: map[string]string{ + "job": "test", + }, + ScrapePool: "test", + ScrapeURL: "http://example.com:8080/metrics", + Health: "up", + LastError: "", + LastScrape: scrapeStart, + LastScrapeDuration: 0.07, + }, + }, + DroppedTargets: []*DroppedTarget{}, + }, + }, + { + endpoint: api.targets, + query: url.Values{ + "state": []string{"Dropped"}, + }, + response: &TargetDiscovery{ + ActiveTargets: []*Target{}, DroppedTargets: []*DroppedTarget{ { DiscoveredLabels: map[string]string{ diff --git a/web/ui/react-app/src/hooks/useLocalStorage.test.tsx b/web/ui/react-app/src/hooks/useLocalStorage.test.tsx new file mode 100644 index 0000000000..680be78507 --- /dev/null +++ b/web/ui/react-app/src/hooks/useLocalStorage.test.tsx @@ -0,0 +1,27 @@ +import { useLocalStorage } from './useLocalStorage'; +import { renderHook, act } from '@testing-library/react-hooks'; + +describe('useLocalStorage', () => { + it('returns the initialState', () => { + const initialState = { a: 1, b: 2 }; + const { result } = renderHook(() => useLocalStorage('mystorage', initialState)); + expect(result.current[0]).toEqual(initialState); + }); + it('stores the initialState as serialized json in localstorage', () => { + const key = 'mystorage'; + const initialState = { a: 1, b: 2 }; + renderHook(() => useLocalStorage(key, initialState)); + expect(localStorage.getItem(key)).toEqual(JSON.stringify(initialState)); + }); + it('returns a setValue function that can reset local storage', () => { + const key = 'mystorage'; + const initialState = { a: 1, b: 2 }; + const { result } = renderHook(() => useLocalStorage(key, 
initialState)); + const newValue = { a: 2, b: 5 }; + act(() => { + result.current[1](newValue); + }); + expect(result.current[0]).toEqual(newValue); + expect(localStorage.getItem(key)).toEqual(JSON.stringify(newValue)); + }); +}); diff --git a/web/ui/react-app/src/hooks/useLocalStorage.tsx b/web/ui/react-app/src/hooks/useLocalStorage.tsx new file mode 100644 index 0000000000..91b85a3279 --- /dev/null +++ b/web/ui/react-app/src/hooks/useLocalStorage.tsx @@ -0,0 +1,13 @@ +import { Dispatch, SetStateAction, useEffect, useState } from 'react'; + +export function useLocalStorage(localStorageKey: string, initialState: S): [S, Dispatch>] { + const localStorageState = JSON.parse(localStorage.getItem(localStorageKey) as string); + const [value, setValue] = useState(localStorageState || initialState); + + useEffect(() => { + const serializedState = JSON.stringify(value); + localStorage.setItem(localStorageKey, serializedState); + }, [localStorageKey, value]); + + return [value, setValue]; +} diff --git a/web/ui/react-app/src/pages/Targets.tsx b/web/ui/react-app/src/pages/Targets.tsx deleted file mode 100644 index 6351f07512..0000000000 --- a/web/ui/react-app/src/pages/Targets.tsx +++ /dev/null @@ -1,15 +0,0 @@ -import React, { FC } from 'react'; -import { RouteComponentProps } from '@reach/router'; -import PathPrefixProps from '../PathPrefixProps'; -import { Alert } from 'reactstrap'; - -const Targets: FC = ({ pathPrefix }) => ( - <> -

<h2>Targets</h2>

- - This page is still under construction. Please try it in the Classic UI. - - -); - -export default Targets; diff --git a/web/ui/react-app/src/pages/index.ts b/web/ui/react-app/src/pages/index.ts index f286ed750c..b9dd5e9cc2 100644 --- a/web/ui/react-app/src/pages/index.ts +++ b/web/ui/react-app/src/pages/index.ts @@ -4,7 +4,7 @@ import Flags from './Flags'; import Rules from './Rules'; import Services from './Services'; import Status from './Status'; -import Targets from './Targets'; +import Targets from './targets/Targets'; import PanelList from './PanelList'; export { Alerts, Config, Flags, Rules, Services, Status, Targets, PanelList }; diff --git a/web/ui/react-app/src/pages/targets/EndpointLink.test.tsx b/web/ui/react-app/src/pages/targets/EndpointLink.test.tsx new file mode 100644 index 0000000000..4ad68b96c3 --- /dev/null +++ b/web/ui/react-app/src/pages/targets/EndpointLink.test.tsx @@ -0,0 +1,36 @@ +import React from 'react'; +import { shallow } from 'enzyme'; +import { Badge, Alert } from 'reactstrap'; +import EndpointLink from './EndpointLink'; + +describe('EndpointLink', () => { + it('renders a simple anchor if the endpoint has no query params', () => { + const endpoint = 'http://100.104.208.71:15090/stats/prometheus'; + const endpointLink = shallow(); + const anchor = endpointLink.find('a'); + expect(anchor.prop('href')).toEqual(endpoint); + expect(anchor.children().text()).toEqual(endpoint); + expect(endpointLink.find('br')).toHaveLength(0); + }); + + it('renders an anchor targeting endpoint but with query param labels if the endpoint has query params', () => { + const endpoint = 'http://100.99.128.71:9115/probe?module=http_2xx&target=http://some-service'; + const endpointLink = shallow(); + const anchor = endpointLink.find('a'); + const badges = endpointLink.find(Badge); + expect(anchor.prop('href')).toEqual(endpoint); + expect(anchor.children().text()).toEqual('http://100.99.128.71:9115/probe'); + expect(endpointLink.find('br')).toHaveLength(1); + 
expect(badges).toHaveLength(2); + const moduleLabel = badges.filterWhere(badge => badge.hasClass('module')); + expect(moduleLabel.children().text()).toEqual('module="http_2xx"'); + const targetLabel = badges.filterWhere(badge => badge.hasClass('target')); + expect(targetLabel.children().text()).toEqual('target="http://some-service"'); + }); + + it('renders an alert if url is invalid', () => { + const endpointLink = shallow(); + const err = endpointLink.find(Alert); + expect(err.render().text()).toEqual('Error: Invalid URL'); + }); +}); diff --git a/web/ui/react-app/src/pages/targets/EndpointLink.tsx b/web/ui/react-app/src/pages/targets/EndpointLink.tsx new file mode 100644 index 0000000000..b67fcf455d --- /dev/null +++ b/web/ui/react-app/src/pages/targets/EndpointLink.tsx @@ -0,0 +1,38 @@ +import React, { FC } from 'react'; +import { Badge, Alert } from 'reactstrap'; + +export interface EndpointLinkProps { + endpoint: string; +} + +const EndpointLink: FC = ({ endpoint }) => { + let url: URL; + try { + url = new URL(endpoint); + } catch (e) { + return ( + + Error: {e.message} + + ); + } + + const { host, pathname, protocol, searchParams }: URL = url; + const params = Array.from(searchParams.entries()); + + return ( + <> + {`${protocol}//${host}${pathname}`} + {params.length > 0 ?
: null} + {params.map(([labelName, labelValue]: [string, string]) => { + return ( + + {`${labelName}="${labelValue}"`} + + ); + })} + + ); +}; + +export default EndpointLink; diff --git a/web/ui/react-app/src/pages/targets/Filter.module.css b/web/ui/react-app/src/pages/targets/Filter.module.css new file mode 100644 index 0000000000..6cc9adbe23 --- /dev/null +++ b/web/ui/react-app/src/pages/targets/Filter.module.css @@ -0,0 +1,4 @@ +.btn { + margin-top: 0.6em; + margin-bottom: 15px; +} diff --git a/web/ui/react-app/src/pages/targets/Filter.test.tsx b/web/ui/react-app/src/pages/targets/Filter.test.tsx new file mode 100644 index 0000000000..79d444ff3e --- /dev/null +++ b/web/ui/react-app/src/pages/targets/Filter.test.tsx @@ -0,0 +1,45 @@ +import React, { Component } from 'react'; +import { shallow, ShallowWrapper } from 'enzyme'; +import { Button, ButtonGroup } from 'reactstrap'; +import Filter, { FilterData, FilterProps } from './Filter'; +import sinon, { SinonSpy } from 'sinon'; + +describe('Filter', () => { + const initialState: FilterData = { showHealthy: true, showUnhealthy: true }; + let setFilter: SinonSpy; + let filterWrapper: ShallowWrapper, Component<{}, {}, Component>>; + beforeEach(() => { + setFilter = sinon.spy(); + filterWrapper = shallow(); + }); + + it('renders a button group', () => { + expect(filterWrapper.find(ButtonGroup)).toHaveLength(1); + }); + + it('renders an all filter button that is active by default', () => { + const btn = filterWrapper.find(Button).filterWhere((btn): boolean => btn.hasClass('all')); + expect(btn.prop('active')).toBe(true); + expect(btn.prop('color')).toBe('primary'); + }); + + it('renders an unhealthy filter button that is inactive by default', () => { + const btn = filterWrapper.find(Button).filterWhere((btn): boolean => btn.hasClass('unhealthy')); + expect(btn.prop('active')).toBe(false); + expect(btn.prop('color')).toBe('primary'); + }); + + it('renders an all filter button which shows all targets', () => { + const btn 
= filterWrapper.find(Button).filterWhere((btn): boolean => btn.hasClass('all')); + btn.simulate('click'); + expect(setFilter.calledOnce).toBe(true); + expect(setFilter.getCall(0).args[0]).toEqual({ showHealthy: true, showUnhealthy: true }); + }); + + it('renders an unhealthy filter button which filters targets', () => { + const btn = filterWrapper.find(Button).filterWhere((btn): boolean => btn.hasClass('unhealthy')); + btn.simulate('click'); + expect(setFilter.calledOnce).toBe(true); + expect(setFilter.getCall(0).args[0]).toEqual({ showHealthy: false, showUnhealthy: true }); + }); +}); diff --git a/web/ui/react-app/src/pages/targets/Filter.tsx b/web/ui/react-app/src/pages/targets/Filter.tsx new file mode 100644 index 0000000000..900a6d5b0d --- /dev/null +++ b/web/ui/react-app/src/pages/targets/Filter.tsx @@ -0,0 +1,39 @@ +import React, { Dispatch, FC, SetStateAction } from 'react'; +import { Button, ButtonGroup } from 'reactstrap'; +import styles from './Filter.module.css'; + +export interface FilterData { + showHealthy: boolean; + showUnhealthy: boolean; +} + +export interface FilterProps { + filter: FilterData; + setFilter: Dispatch>; +} + +const Filter: FC = ({ filter, setFilter }) => { + const { showHealthy } = filter; + const btnProps = { + all: { + active: showHealthy, + className: `all ${styles.btn}`, + color: 'primary', + onClick: (): void => setFilter({ ...filter, showHealthy: true }), + }, + unhealthy: { + active: !showHealthy, + className: `unhealthy ${styles.btn}`, + color: 'primary', + onClick: (): void => setFilter({ ...filter, showHealthy: false }), + }, + }; + return ( + + + + + ); +}; + +export default Filter; diff --git a/web/ui/react-app/src/pages/targets/ScrapePoolList.test.tsx b/web/ui/react-app/src/pages/targets/ScrapePoolList.test.tsx new file mode 100644 index 0000000000..8363b0b23b --- /dev/null +++ b/web/ui/react-app/src/pages/targets/ScrapePoolList.test.tsx @@ -0,0 +1,98 @@ +import * as React from 'react'; +import { mount, shallow, 
ReactWrapper } from 'enzyme'; +import { act } from 'react-dom/test-utils'; +import { Alert } from 'reactstrap'; +import { sampleApiResponse } from './__testdata__/testdata'; +import ScrapePoolList from './ScrapePoolList'; +import ScrapePoolPanel from './ScrapePoolPanel'; +import { Target } from './target'; +import { FontAwesomeIcon } from '@fortawesome/react-fontawesome'; +import { faSpinner } from '@fortawesome/free-solid-svg-icons'; + +describe('Flags', () => { + const defaultProps = { + filter: { showHealthy: true, showUnhealthy: true }, + pathPrefix: '..', + }; + + beforeEach(() => { + fetch.resetMocks(); + }); + + describe('before data is returned', () => { + const scrapePoolList = shallow(); + const spinner = scrapePoolList.find(FontAwesomeIcon); + + it('renders a spinner', () => { + expect(spinner.prop('icon')).toEqual(faSpinner); + expect(spinner.prop('spin')).toBe(true); + }); + + it('renders exactly one spinner', () => { + expect(spinner).toHaveLength(1); + }); + }); + + describe('when data is returned', () => { + let scrapePoolList: ReactWrapper; + let mock: Promise; + beforeEach(() => { + //Tooltip requires DOM elements to exist. They do not in enzyme rendering so we must manually create them. 
+ const scrapePools: { [key: string]: number } = { blackbox: 3, node_exporter: 1, prometheus: 1 }; + Object.keys(scrapePools).forEach((pool: string): void => { + Array.from(Array(scrapePools[pool]).keys()).forEach((idx: number): void => { + const div = document.createElement('div'); + div.id = `series-labels-${pool}-${idx}`; + document.body.appendChild(div); + }); + }); + mock = fetch.mockResponse(JSON.stringify(sampleApiResponse)); + }); + + it('renders a table', async () => { + await act(async () => { + scrapePoolList = mount(); + }); + scrapePoolList.update(); + expect(mock).toHaveBeenCalledWith('../api/v1/targets?state=active', undefined); + const panels = scrapePoolList.find(ScrapePoolPanel); + expect(panels).toHaveLength(3); + const activeTargets: Target[] = sampleApiResponse.data.activeTargets as Target[]; + activeTargets.forEach(({ scrapePool }: Target) => { + const panel = scrapePoolList.find(ScrapePoolPanel).filterWhere(panel => panel.prop('scrapePool') === scrapePool); + expect(panel).toHaveLength(1); + }); + }); + + it('filters by health', async () => { + const props = { + ...defaultProps, + filter: { showHealthy: false, showUnhealthy: true }, + }; + await act(async () => { + scrapePoolList = mount(); + }); + scrapePoolList.update(); + expect(mock).toHaveBeenCalledWith('../api/v1/targets?state=active', undefined); + const panels = scrapePoolList.find(ScrapePoolPanel); + expect(panels).toHaveLength(0); + }); + }); + + describe('when an error is returned', () => { + it('displays an alert', async () => { + const mock = fetch.mockReject(new Error('Error fetching targets')); + + let scrapePoolList: ReactWrapper; + await act(async () => { + scrapePoolList = mount(); + }); + scrapePoolList.update(); + + expect(mock).toHaveBeenCalledWith('../api/v1/targets?state=active', undefined); + const alert = scrapePoolList.find(Alert); + expect(alert.prop('color')).toBe('danger'); + expect(alert.text()).toContain('Error fetching targets'); + }); + }); +}); diff --git 
a/web/ui/react-app/src/pages/targets/ScrapePoolList.tsx b/web/ui/react-app/src/pages/targets/ScrapePoolList.tsx new file mode 100644 index 0000000000..d46efc0c01 --- /dev/null +++ b/web/ui/react-app/src/pages/targets/ScrapePoolList.tsx @@ -0,0 +1,55 @@ +import React, { FC } from 'react'; +import { FilterData } from './Filter'; +import { useFetch } from '../../utils/useFetch'; +import { ScrapePool, groupTargets } from './target'; +import ScrapePoolPanel from './ScrapePoolPanel'; +import PathPrefixProps from '../../PathPrefixProps'; +import { FontAwesomeIcon } from '@fortawesome/react-fontawesome'; +import { faSpinner } from '@fortawesome/free-solid-svg-icons'; +import { Alert } from 'reactstrap'; + +interface ScrapePoolListProps { + filter: FilterData; +} + +const filterByHealth = ({ upCount, targets }: ScrapePool, { showHealthy, showUnhealthy }: FilterData): boolean => { + const isHealthy = upCount === targets.length; + return (isHealthy && showHealthy) || (!isHealthy && showUnhealthy); +}; + +const ScrapePoolList: FC = ({ filter, pathPrefix }) => { + const { response, error } = useFetch(`${pathPrefix}/api/v1/targets?state=active`); + + if (error) { + return ( + + Error fetching targets: {error.message} + + ); + } else if (response && response.status !== 'success') { + return ( + + Error fetching targets: {response.status} + + ); + } else if (response && response.data) { + const { activeTargets } = response.data; + const targetGroups = groupTargets(activeTargets); + return ( + <> + {Object.keys(targetGroups) + .filter((scrapePool: string) => filterByHealth(targetGroups[scrapePool], filter)) + .map((scrapePool: string) => { + const targetGroupProps = { + scrapePool, + targetGroup: targetGroups[scrapePool], + }; + return ; + })} + + ); + } + return ; +}; + +export default ScrapePoolList; diff --git a/web/ui/react-app/src/pages/targets/ScrapePoolPanel.module.css b/web/ui/react-app/src/pages/targets/ScrapePoolPanel.module.css new file mode 100644 index 
0000000000..a5b5f9ac5b --- /dev/null +++ b/web/ui/react-app/src/pages/targets/ScrapePoolPanel.module.css @@ -0,0 +1,43 @@ +.container { + margin-top: -12px; +} + +.title { + font-size: 20px; + font-weight: bold; + cursor: pointer; +} + +.normal { + composes: title; +} + +.danger { + composes: title; + color: rgb(242, 65, 65); +} + +.table { + width: 100%; +} + +.cell { + height: auto; + word-wrap: break-word; + word-break: break-all; +} + +.endpoint, .labels { + composes: cell; + width: 25%; +} + +.state, .last-scrape { + composes: cell; + width: 10%; +} + +.errors { + composes: cell; + width: 30%; +} diff --git a/web/ui/react-app/src/pages/targets/ScrapePoolPanel.test.tsx b/web/ui/react-app/src/pages/targets/ScrapePoolPanel.test.tsx new file mode 100644 index 0000000000..2d98279895 --- /dev/null +++ b/web/ui/react-app/src/pages/targets/ScrapePoolPanel.test.tsx @@ -0,0 +1,142 @@ +import React from 'react'; +import { mount, shallow } from 'enzyme'; +import { targetGroups } from './__testdata__/testdata'; +import ScrapePoolPanel, { columns } from './ScrapePoolPanel'; +import { Button, Collapse, Table, Badge } from 'reactstrap'; +import { Target, getColor } from './target'; +import EndpointLink from './EndpointLink'; +import TargetLabels from './TargetLabels'; + +describe('ScrapePoolPanel', () => { + const defaultProps = { + scrapePool: 'blackbox', + targetGroup: targetGroups.blackbox, + }; + const scrapePoolPanel = shallow(); + + it('renders a container', () => { + const div = scrapePoolPanel.find('div').filterWhere(elem => elem.hasClass('container')); + expect(div).toHaveLength(1); + }); + + describe('Header', () => { + it('renders an h3', () => { + expect(scrapePoolPanel.find('h3')).toHaveLength(1); + }); + + it('renders an anchor with up count and danger color if upCount < targetsCount', () => { + const anchor = scrapePoolPanel.find('a'); + expect(anchor).toHaveLength(1); + expect(anchor.prop('id')).toEqual('pool-blackbox'); + 
expect(anchor.prop('href')).toEqual('#pool-blackbox'); + expect(anchor.text()).toEqual('blackbox (2/3 up)'); + expect(anchor.prop('className')).toEqual('danger'); + }); + + it('renders an anchor with up count and normal color if upCount == targetsCount', () => { + const props = { + scrapePool: 'prometheus', + targetGroup: targetGroups.prometheus, + }; + const scrapePoolPanel = shallow(); + const anchor = scrapePoolPanel.find('a'); + expect(anchor).toHaveLength(1); + expect(anchor.prop('id')).toEqual('pool-prometheus'); + expect(anchor.prop('href')).toEqual('#pool-prometheus'); + expect(anchor.text()).toEqual('prometheus (1/1 up)'); + expect(anchor.prop('className')).toEqual('normal'); + }); + + it('renders a show less btn if expanded', () => { + const btn = scrapePoolPanel.find(Button); + expect(btn).toHaveLength(1); + expect(btn.prop('color')).toEqual('primary'); + expect(btn.prop('size')).toEqual('xs'); + expect(btn.render().text()).toEqual('show less'); + }); + + it('renders a show more btn if collapsed', () => { + const props = { + scrapePool: 'prometheus', + targetGroup: targetGroups.prometheus, + }; + const div = document.createElement('div'); + div.id = `series-labels-prometheus-0`; + document.body.appendChild(div); + const scrapePoolPanel = mount(); + + const btn = scrapePoolPanel.find(Button); + btn.simulate('click'); + expect(btn.render().text()).toEqual('show more'); + const collapse = scrapePoolPanel.find(Collapse); + expect(collapse.prop('isOpen')).toBe(false); + }); + }); + + it('renders a Collapse component', () => { + const collapse = scrapePoolPanel.find(Collapse); + expect(collapse.prop('isOpen')).toBe(true); + }); + + describe('Table', () => { + it('renders a table', () => { + const table = scrapePoolPanel.find(Table); + const headers = table.find('th'); + expect(table).toHaveLength(1); + expect(headers).toHaveLength(6); + columns.forEach(col => { + expect(headers.contains(col)); + }); + }); + + describe('for each target', () => { + const table = 
scrapePoolPanel.find(Table); + defaultProps.targetGroup.targets.forEach( + ({ discoveredLabels, labels, scrapeUrl, lastError, health }: Target, idx: number) => { + const row = table.find('tr').at(idx + 1); + + it('renders an EndpointLink with the scrapeUrl', () => { + const link = row.find(EndpointLink); + expect(link).toHaveLength(1); + expect(link.prop('endpoint')).toEqual(scrapeUrl); + }); + + it('renders a badge for health', () => { + const td = row.find('td').filterWhere(elem => Boolean(elem.hasClass('state'))); + const badge = td.find(Badge); + expect(badge).toHaveLength(1); + expect(badge.prop('color')).toEqual(getColor(health)); + expect(badge.children().text()).toEqual(health.toUpperCase()); + }); + + it('renders series labels', () => { + const targetLabels = row.find(TargetLabels); + expect(targetLabels).toHaveLength(1); + expect(targetLabels.prop('discoveredLabels')).toEqual(discoveredLabels); + expect(targetLabels.prop('labels')).toEqual(labels); + }); + + it('renders last scrape time', () => { + const lastScrapeCell = row.find('td').filterWhere(elem => Boolean(elem.hasClass('last-scrape'))); + expect(lastScrapeCell).toHaveLength(1); + }); + + it('renders last scrape duration', () => { + const lastScrapeCell = row.find('td').filterWhere(elem => Boolean(elem.hasClass('scrape-duration'))); + expect(lastScrapeCell).toHaveLength(1); + }); + + it('renders a badge for Errors', () => { + const td = row.find('td').filterWhere(elem => Boolean(elem.hasClass('errors'))); + const badge = td.find(Badge); + expect(badge).toHaveLength(lastError ? 
1 : 0); + if (lastError) { + expect(badge.prop('color')).toEqual('danger'); + expect(badge.children().text()).toEqual(lastError); + } + }); + } + ); + }); + }); +}); diff --git a/web/ui/react-app/src/pages/targets/ScrapePoolPanel.tsx b/web/ui/react-app/src/pages/targets/ScrapePoolPanel.tsx new file mode 100644 index 0000000000..b2c8497f28 --- /dev/null +++ b/web/ui/react-app/src/pages/targets/ScrapePoolPanel.tsx @@ -0,0 +1,95 @@ +import React, { FC } from 'react'; +import { ScrapePool, getColor } from './target'; +import { Button, Collapse, Table, Badge } from 'reactstrap'; +import styles from './ScrapePoolPanel.module.css'; +import { Target } from './target'; +import EndpointLink from './EndpointLink'; +import TargetLabels from './TargetLabels'; +import { formatRelative, humanizeDuration } from '../../utils/timeFormat'; +import { now } from 'moment'; +import { useLocalStorage } from '../../hooks/useLocalStorage'; + +interface PanelProps { + scrapePool: string; + targetGroup: ScrapePool; +} + +export const columns = ['Endpoint', 'State', 'Labels', 'Last Scrape', 'Scrape Duration', 'Error']; + +const ScrapePoolPanel: FC = ({ scrapePool, targetGroup }) => { + const [{ expanded }, setOptions] = useLocalStorage(`targets-${scrapePool}-expanded`, { expanded: true }); + const modifier = targetGroup.upCount < targetGroup.targets.length ? 'danger' : 'normal'; + const id = `pool-${scrapePool}`; + const anchorProps = { + href: `#${id}`, + id, + }; + const btnProps = { + children: `show ${expanded ? 'less' : 'more'}`, + color: 'primary', + onClick: (): void => setOptions({ expanded: !expanded }), + size: 'xs', + style: { + padding: '0.3em 0.3em 0.25em 0.3em', + fontSize: '0.375em', + marginLeft: '1em', + verticalAlign: 'baseline', + }, + }; + + return ( +
+

+ + {`${scrapePool} (${targetGroup.upCount}/${targetGroup.targets.length} up)`} + +

+ + + + + {columns.map(column => ( + + ))} + + + + {targetGroup.targets.map((target: Target, idx: number) => { + const { + discoveredLabels, + labels, + scrapePool, + scrapeUrl, + lastError, + lastScrape, + lastScrapeDuration, + health, + } = target; + const color = getColor(health); + + return ( + + + + + + + + + ); + })} + +
{column}
+ + + {health.toUpperCase()} + + + {formatRelative(lastScrape, now())}{humanizeDuration(lastScrapeDuration * 1000)}{lastError ? {lastError} : null}
+
+
+ ); +}; + +export default ScrapePoolPanel; diff --git a/web/ui/react-app/src/pages/targets/TargetLabels.module.css b/web/ui/react-app/src/pages/targets/TargetLabels.module.css new file mode 100644 index 0000000000..9c3768818e --- /dev/null +++ b/web/ui/react-app/src/pages/targets/TargetLabels.module.css @@ -0,0 +1,3 @@ +.discovered { + white-space: nowrap; +} diff --git a/web/ui/react-app/src/pages/targets/TargetLabels.test.tsx b/web/ui/react-app/src/pages/targets/TargetLabels.test.tsx new file mode 100644 index 0000000000..51f402c1f3 --- /dev/null +++ b/web/ui/react-app/src/pages/targets/TargetLabels.test.tsx @@ -0,0 +1,50 @@ +import * as React from 'react'; +import { shallow } from 'enzyme'; +import TargetLabels from './TargetLabels'; +import { Tooltip, Badge } from 'reactstrap'; +import toJson from 'enzyme-to-json'; + +describe('targetLabels', () => { + const defaultProps = { + discoveredLabels: { + __address__: 'localhost:9100', + __metrics_path__: '/metrics', + __scheme__: 'http', + job: 'node_exporter', + }, + labels: { + instance: 'localhost:9100', + job: 'node_exporter', + foo: 'bar', + }, + idx: 1, + scrapePool: 'cortex/node-exporter_group/0', + }; + const targetLabels = shallow(); + + it('renders a div of series labels', () => { + const div = targetLabels.find('div').filterWhere(elem => elem.hasClass('series-labels-container')); + expect(div).toHaveLength(1); + expect(div.prop('id')).toEqual('series-labels-cortex/node-exporter_group/0-1'); + }); + + it('wraps each label in a label badge', () => { + const l: { [key: string]: string } = defaultProps.labels; + Object.keys(l).forEach((labelName: string): void => { + const badge = targetLabels.find(Badge).filterWhere(badge => badge.hasClass(labelName)); + expect(badge.children().text()).toEqual(`${labelName}="${l[labelName]}"`); + }); + expect(targetLabels.find(Badge)).toHaveLength(3); + }); + + it('renders a tooltip for discovered labels', () => { + const tooltip = targetLabels.find(Tooltip); + 
expect(tooltip).toHaveLength(1); + expect(tooltip.prop('isOpen')).toBe(false); + expect(tooltip.prop('target')).toEqual('series-labels-cortex/node-exporter_group/0-1'); + }); + + it('renders discovered labels', () => { + expect(toJson(targetLabels)).toMatchSnapshot(); + }); +}); diff --git a/web/ui/react-app/src/pages/targets/TargetLabels.tsx b/web/ui/react-app/src/pages/targets/TargetLabels.tsx new file mode 100644 index 0000000000..0e8dfeacec --- /dev/null +++ b/web/ui/react-app/src/pages/targets/TargetLabels.tsx @@ -0,0 +1,48 @@ +import React, { FC, Fragment, useState } from 'react'; +import { Badge, Tooltip } from 'reactstrap'; +import styles from './TargetLabels.module.css'; + +interface Labels { + [key: string]: string; +} + +export interface TargetLabelsProps { + discoveredLabels: Labels; + labels: Labels; + idx: number; + scrapePool: string; +} + +const formatLabels = (labels: Labels): string[] => Object.keys(labels).map(key => `${key}="${labels[key]}"`); + +const TargetLabels: FC = ({ discoveredLabels, labels, idx, scrapePool }) => { + const [tooltipOpen, setTooltipOpen] = useState(false); + + const toggle = (): void => setTooltipOpen(!tooltipOpen); + const id = `series-labels-${scrapePool}-${idx}`; + + return ( + <> +
+ {Object.keys(labels).map(labelName => { + return ( + + {`${labelName}="${labels[labelName]}"`} + + ); + })} +
+ + Before relabeling: + {formatLabels(discoveredLabels).map((s: string, idx: number) => ( + +
+ {s} +
+ ))} +
+ + ); +}; + +export default TargetLabels; diff --git a/web/ui/react-app/src/pages/targets/Targets.test.tsx b/web/ui/react-app/src/pages/targets/Targets.test.tsx new file mode 100644 index 0000000000..866c35b095 --- /dev/null +++ b/web/ui/react-app/src/pages/targets/Targets.test.tsx @@ -0,0 +1,33 @@ +import React from 'react'; +import { shallow } from 'enzyme'; +import Targets from './Targets'; +import Filter from './Filter'; +import ScrapePoolList from './ScrapePoolList'; + +describe('Targets', () => { + const defaultProps = { + pathPrefix: '..', + }; + const targets = shallow(); + describe('Header', () => { + const h2 = targets.find('h2'); + it('renders a header', () => { + expect(h2.text()).toEqual('Targets'); + }); + it('renders exactly one header', () => { + const h2 = targets.find('h2'); + expect(h2).toHaveLength(1); + }); + }); + it('renders a filter', () => { + const filter = targets.find(Filter); + expect(filter).toHaveLength(1); + expect(filter.prop('filter')).toEqual({ showHealthy: true, showUnhealthy: true }); + }); + it('renders a scrape pool list', () => { + const scrapePoolList = targets.find(ScrapePoolList); + expect(scrapePoolList).toHaveLength(1); + expect(scrapePoolList.prop('filter')).toEqual({ showHealthy: true, showUnhealthy: true }); + expect(scrapePoolList.prop('pathPrefix')).toEqual(defaultProps.pathPrefix); + }); +}); diff --git a/web/ui/react-app/src/pages/targets/Targets.tsx b/web/ui/react-app/src/pages/targets/Targets.tsx new file mode 100644 index 0000000000..3de6a02c88 --- /dev/null +++ b/web/ui/react-app/src/pages/targets/Targets.tsx @@ -0,0 +1,22 @@ +import React, { FC } from 'react'; +import { RouteComponentProps } from '@reach/router'; +import Filter from './Filter'; +import ScrapePoolList from './ScrapePoolList'; +import PathPrefixProps from '../../PathPrefixProps'; +import { useLocalStorage } from '../../hooks/useLocalStorage'; + +const Targets: FC = ({ pathPrefix }) => { + const [filter, setFilter] = 
useLocalStorage('targets-page-filter', { showHealthy: true, showUnhealthy: true }); + const filterProps = { filter, setFilter }; + const scrapePoolListProps = { filter, pathPrefix }; + + return ( + <> +

Targets

+ + + + ); +}; + +export default Targets; diff --git a/web/ui/react-app/src/pages/targets/__snapshots__/TargetLabels.test.tsx.snap b/web/ui/react-app/src/pages/targets/__snapshots__/TargetLabels.test.tsx.snap new file mode 100644 index 0000000000..3630b4fc89 --- /dev/null +++ b/web/ui/react-app/src/pages/targets/__snapshots__/TargetLabels.test.tsx.snap @@ -0,0 +1,81 @@ +// Jest Snapshot v1, https://goo.gl/fbAQLP + +exports[`targetLabels renders discovered labels 1`] = ` + +
+ + instance="localhost:9100" + + + job="node_exporter" + + + foo="bar" + +
+ + + Before relabeling: + +
+ + __address__="localhost:9100" + +
+ + __metrics_path__="/metrics" + +
+ + __scheme__="http" + +
+ + job="node_exporter" + +
+
+`; diff --git a/web/ui/react-app/src/pages/targets/__testdata__/testdata.ts b/web/ui/react-app/src/pages/targets/__testdata__/testdata.ts new file mode 100644 index 0000000000..e7cb3de1d6 --- /dev/null +++ b/web/ui/react-app/src/pages/targets/__testdata__/testdata.ts @@ -0,0 +1,215 @@ +/* eslint @typescript-eslint/camelcase: 0 */ + +import { ScrapePools, Target, Labels } from '../target'; + +export const targetGroups: ScrapePools = Object.freeze({ + blackbox: { + upCount: 2, + targets: [ + { + discoveredLabels: { + __address__: 'http://prometheus.io', + __metrics_path__: '/probe', + __param_module: 'http_2xx', + __scheme__: 'http', + job: 'blackbox', + }, + labels: { + instance: 'http://prometheus.io', + job: 'blackbox', + }, + scrapePool: 'blackbox', + scrapeUrl: 'http://127.0.0.1:9115/probe?module=http_2xx&target=http%3A%2F%2Fprometheus.io', + lastError: '', + lastScrape: '2019-11-04T11:52:14.759299-07:00', + lastScrapeDuration: 36560147, + health: 'up', + }, + { + discoveredLabels: { + __address__: 'https://prometheus.io', + __metrics_path__: '/probe', + __param_module: 'http_2xx', + __scheme__: 'http', + job: 'blackbox', + }, + labels: { + instance: 'https://prometheus.io', + job: 'blackbox', + }, + scrapePool: 'blackbox', + scrapeUrl: 'http://127.0.0.1:9115/probe?module=http_2xx&target=https%3A%2F%2Fprometheus.io', + lastError: '', + lastScrape: '2019-11-04T11:52:24.731096-07:00', + lastScrapeDuration: 49448763, + health: 'up', + }, + { + discoveredLabels: { + __address__: 'http://example.com:8080', + __metrics_path__: '/probe', + __param_module: 'http_2xx', + __scheme__: 'http', + job: 'blackbox', + }, + labels: { + instance: 'http://example.com:8080', + job: 'blackbox', + }, + scrapePool: 'blackbox', + scrapeUrl: 'http://127.0.0.1:9115/probe?module=http_2xx&target=http%3A%2F%2Fexample.com%3A8080', + lastError: '', + lastScrape: '2019-11-04T11:52:13.516654-07:00', + lastScrapeDuration: 120916592, + health: 'down', + }, + ], + }, + node_exporter: { + upCount: 
1, + targets: [ + { + discoveredLabels: { + __address__: 'localhost:9100', + __metrics_path__: '/metrics', + __scheme__: 'http', + job: 'node_exporter', + }, + labels: { + instance: 'localhost:9100', + job: 'node_exporter', + }, + scrapePool: 'node_exporter', + scrapeUrl: 'http://localhost:9100/metrics', + lastError: '', + lastScrape: '2019-11-04T11:52:14.145703-07:00', + lastScrapeDuration: 3842307, + health: 'up', + }, + ], + }, + prometheus: { + upCount: 1, + targets: [ + { + discoveredLabels: { + __address__: 'localhost:9090', + __metrics_path__: '/metrics', + __scheme__: 'http', + job: 'prometheus', + }, + labels: { + instance: 'localhost:9090', + job: 'prometheus', + }, + scrapePool: 'prometheus', + scrapeUrl: 'http://localhost:9090/metrics', + lastError: '', + lastScrape: '2019-11-04T11:52:18.479731-07:00', + lastScrapeDuration: 4050976, + health: 'up', + }, + ], + }, +}); + +export const sampleApiResponse = Object.freeze({ + status: 'success', + data: { + activeTargets: [ + { + discoveredLabels: { + __address__: 'http://prometheus.io', + __metrics_path__: '/probe', + __param_module: 'http_2xx', + __scheme__: 'http', + job: 'blackbox', + }, + labels: { + instance: 'http://prometheus.io', + job: 'blackbox', + }, + scrapePool: 'blackbox', + scrapeUrl: 'http://127.0.0.1:9115/probe?module=http_2xx&target=http%3A%2F%2Fprometheus.io', + lastError: '', + lastScrape: '2019-11-04T11:52:14.759299-07:00', + lastScrapeDuration: 36560147, + health: 'up', + }, + { + discoveredLabels: { + __address__: 'https://prometheus.io', + __metrics_path__: '/probe', + __param_module: 'http_2xx', + __scheme__: 'http', + job: 'blackbox', + }, + labels: { + instance: 'https://prometheus.io', + job: 'blackbox', + }, + scrapePool: 'blackbox', + scrapeUrl: 'http://127.0.0.1:9115/probe?module=http_2xx&target=https%3A%2F%2Fprometheus.io', + lastError: '', + lastScrape: '2019-11-04T11:52:24.731096-07:00', + lastScrapeDuration: 49448763, + health: 'up', + }, + { + discoveredLabels: { + 
__address__: 'http://example.com:8080', + __metrics_path__: '/probe', + __param_module: 'http_2xx', + __scheme__: 'http', + job: 'blackbox', + }, + labels: { + instance: 'http://example.com:8080', + job: 'blackbox', + }, + scrapePool: 'blackbox', + scrapeUrl: 'http://127.0.0.1:9115/probe?module=http_2xx&target=http%3A%2F%2Fexample.com%3A8080', + lastError: '', + lastScrape: '2019-11-04T11:52:13.516654-07:00', + lastScrapeDuration: 120916592, + health: 'up', + }, + { + discoveredLabels: { + __address__: 'localhost:9100', + __metrics_path__: '/metrics', + __scheme__: 'http', + job: 'node_exporter', + }, + labels: { + instance: 'localhost:9100', + job: 'node_exporter', + }, + scrapePool: 'node_exporter', + scrapeUrl: 'http://localhost:9100/metrics', + lastError: '', + lastScrape: '2019-11-04T11:52:14.145703-07:00', + lastScrapeDuration: 3842307, + health: 'up', + }, + { + discoveredLabels: { + __address__: 'localhost:9090', + __metrics_path__: '/metrics', + __scheme__: 'http', + job: 'prometheus', + }, + labels: { + instance: 'localhost:9090', + job: 'prometheus', + }, + scrapePool: 'prometheus', + scrapeUrl: 'http://localhost:9090/metrics', + lastError: '', + lastScrape: '2019-11-04T11:52:18.479731-07:00', + lastScrapeDuration: 4050976, + health: 'up', + }, + ], + }, +}); diff --git a/web/ui/react-app/src/pages/targets/target.test.ts b/web/ui/react-app/src/pages/targets/target.test.ts new file mode 100644 index 0000000000..613ec97792 --- /dev/null +++ b/web/ui/react-app/src/pages/targets/target.test.ts @@ -0,0 +1,45 @@ +/* eslint @typescript-eslint/camelcase: 0 */ + +import { sampleApiResponse } from './__testdata__/testdata'; +import { groupTargets, Target, ScrapePools, getColor } from './target'; +import { string } from 'prop-types'; + +describe('groupTargets', () => { + const targets: Target[] = sampleApiResponse.data.activeTargets as Target[]; + const targetGroups: ScrapePools = groupTargets(targets); + + it('groups a list of targets by scrape job', () => { + 
['blackbox', 'prometheus', 'node_exporter'].forEach(scrapePool => { + expect(Object.keys(targetGroups)).toContain(scrapePool); + }); + Object.keys(targetGroups).forEach((scrapePool: string): void => { + const ts: Target[] = targetGroups[scrapePool].targets; + ts.forEach((t: Target) => { + expect(t.scrapePool).toEqual(scrapePool); + }); + }); + }); + + it('adds upCount during aggregation', () => { + const testCases: { [key: string]: number } = { blackbox: 3, prometheus: 1, node_exporter: 1 }; + Object.keys(testCases).forEach((scrapePool: string): void => { + expect(targetGroups[scrapePool].upCount).toEqual(testCases[scrapePool]); + }); + }); +}); + +describe('getColor', () => { + const testCases: { color: string; status: string }[] = [ + { color: 'danger', status: 'down' }, + { color: 'danger', status: 'DOWN' }, + { color: 'warning', status: 'unknown' }, + { color: 'warning', status: 'foo' }, + { color: 'success', status: 'up' }, + { color: 'success', status: 'Up' }, + ]; + testCases.forEach(({ color, status }) => { + it(`returns ${color} for ${status} status`, () => { + expect(getColor(status)).toEqual(color); + }); + }); +}); diff --git a/web/ui/react-app/src/pages/targets/target.ts b/web/ui/react-app/src/pages/targets/target.ts new file mode 100644 index 0000000000..cca5ca138f --- /dev/null +++ b/web/ui/react-app/src/pages/targets/target.ts @@ -0,0 +1,49 @@ +export interface Labels { + [key: string]: string; +} + +export interface Target { + discoveredLabels: Labels; + labels: Labels; + scrapePool: string; + scrapeUrl: string; + lastError: string; + lastScrape: string; + lastScrapeDuration: number; + health: string; +} + +export interface ScrapePool { + upCount: number; + targets: Target[]; +} + +export interface ScrapePools { + [scrapePool: string]: ScrapePool; +} + +export const groupTargets = (targets: Target[]): ScrapePools => + targets.reduce((pools: ScrapePools, target: Target) => { + const { health, scrapePool } = target; + const up = health.toLowerCase() 
=== 'up' ? 1 : 0; + if (!pools[scrapePool]) { + pools[scrapePool] = { + upCount: 0, + targets: [], + }; + } + pools[scrapePool].targets.push(target); + pools[scrapePool].upCount += up; + return pools; + }, {}); + +export const getColor = (health: string): string => { + switch (health.toLowerCase()) { + case 'up': + return 'success'; + case 'down': + return 'danger'; + default: + return 'warning'; + } +}; diff --git a/web/ui/react-app/src/utils/timeFormat.test.ts b/web/ui/react-app/src/utils/timeFormat.test.ts index a5cc398d2b..9218cbbe35 100644 --- a/web/ui/react-app/src/utils/timeFormat.test.ts +++ b/web/ui/react-app/src/utils/timeFormat.test.ts @@ -1,4 +1,4 @@ -import { formatTime, parseTime, formatRange, parseRange } from './timeFormat'; +import { formatTime, parseTime, formatRange, parseRange, humanizeDuration, formatRelative, now } from './timeFormat'; describe('formatTime', () => { it('returns a time string representing the time in seconds', () => { @@ -11,6 +11,7 @@ describe('parseTime', () => { it('returns a time string representing the time in seconds', () => { expect(parseTime('2019-10-26 00:23')).toEqual(1572049380000); expect(parseTime('1970-01-01 00:00')).toEqual(0); + expect(parseTime('0001-01-01T00:00:00Z')).toEqual(-62135596800000); }); }); @@ -35,3 +36,47 @@ describe('parseRange', () => { expect(parseRange('63s')).toEqual(63); }); }); + +describe('humanizeDuration', () => { + it('humanizes zero', () => { + expect(humanizeDuration(0)).toEqual('0s'); + }); + it('humanizes milliseconds', () => { + expect(humanizeDuration(1.234567)).toEqual('1.235ms'); + expect(humanizeDuration(12.34567)).toEqual('12.346ms'); + expect(humanizeDuration(123.45678)).toEqual('123.457ms'); + expect(humanizeDuration(123)).toEqual('123.000ms'); + }); + it('humanizes seconds', () => { + expect(humanizeDuration(12340)).toEqual('12.340s'); + }); + it('humanizes minutes', () => { + expect(humanizeDuration(1234567)).toEqual('20m 34s'); + }); + + it('humanizes hours', () => { + 
expect(humanizeDuration(12345678)).toEqual('3h 25m 45s'); + }); + + it('humanizes days', () => { + expect(humanizeDuration(123456789)).toEqual('1d 10h 17m 36s'); + expect(humanizeDuration(123456789000)).toEqual('1428d 21h 33m 9s'); + }); + it('takes sign into account', () => { + expect(humanizeDuration(-123456789000)).toEqual('-1428d 21h 33m 9s'); + }); +}); + +describe('formatRelative', () => { + it('renders never for pre-beginning-of-time strings', () => { + expect(formatRelative('0001-01-01T00:00:00Z', now())).toEqual('Never'); + }); + it('renders a humanized duration for sane durations', () => { + expect(formatRelative('2019-11-04T09:15:29.578701-07:00', parseTime('2019-11-04T09:15:35.8701-07:00'))).toEqual( + '6.292s' + ); + expect(formatRelative('2019-11-04T09:15:35.8701-07:00', parseTime('2019-11-04T09:15:29.578701-07:00'))).toEqual( + '-6.292s' + ); + }); +}); diff --git a/web/ui/react-app/src/utils/timeFormat.ts b/web/ui/react-app/src/utils/timeFormat.ts index 6e39918ac9..402f661baf 100644 --- a/web/ui/react-app/src/utils/timeFormat.ts +++ b/web/ui/react-app/src/utils/timeFormat.ts @@ -36,3 +36,40 @@ export function parseTime(timeText: string): number { export function formatTime(time: number): string { return moment.utc(time).format('YYYY-MM-DD HH:mm'); } + +export const now = (): number => moment().valueOf(); + +export const humanizeDuration = (milliseconds: number): string => { + const sign = milliseconds < 0 ? '-' : ''; + const unsignedMillis = milliseconds < 0 ? 
-1 * milliseconds : milliseconds; + const duration = moment.duration(unsignedMillis, 'ms'); + const ms = Math.floor(duration.milliseconds()); + const s = Math.floor(duration.seconds()); + const m = Math.floor(duration.minutes()); + const h = Math.floor(duration.hours()); + const d = Math.floor(duration.asDays()); + if (d !== 0) { + return `${sign}${d}d ${h}h ${m}m ${s}s`; + } + if (h !== 0) { + return `${sign}${h}h ${m}m ${s}s`; + } + if (m !== 0) { + return `${sign}${m}m ${s}s`; + } + if (s !== 0) { + return `${sign}${s}.${ms}s`; + } + if (unsignedMillis > 0) { + return `${sign}${unsignedMillis.toFixed(3)}ms`; + } + return '0s'; +}; + +export const formatRelative = (startStr: string, end: number): string => { + const start = parseTime(startStr); + if (start < 0) { + return 'Never'; + } + return humanizeDuration(end - start); +};