React UI: Implement /targets page (#6276)
* Add LastScrapeDuration to targets endpoint
* Add scrape job name to targets endpoint
* Implement the /targets page in React
* Add state query param to targets endpoint
* Use state filter in API call
* API feedback
* PR feedback (frontend)
* Implement and use localStorage hook
* PR feedback

Signed-off-by: Dustin Hooten <dhooten@splunk.com>

pull/6308/head
parent 454315337b
commit ca60bf298c
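For orientation before the diff: the sketch below shows roughly how the new page consumes the extended targets endpoint, using the new state query parameter and the scrapePool and lastScrapeDuration fields this change adds to the API response. It is an illustrative TypeScript snippet, not code from this commit; the fetchActiveTargets helper name and the standalone use of a global fetch are assumptions (the real UI goes through the useFetch hook in ScrapePoolList.tsx).

// Illustrative sketch only (not part of this commit): request active targets
// and read the fields added by this PR. Assumes a browser-style global fetch()
// and the Target shape defined in target.ts further down in this diff.
interface TargetSummary {
  scrapePool: string; // scrape job name, added to the endpoint by this PR
  scrapeUrl: string;
  lastScrapeDuration: number; // added to the endpoint by this PR
  health: string;
}

// Hypothetical helper for illustration.
async function fetchActiveTargets(pathPrefix: string): Promise<TargetSummary[]> {
  // state=active limits the response to currently active targets.
  const res = await fetch(`${pathPrefix}/api/v1/targets?state=active`);
  const json = await res.json();
  return json.data.activeTargets as TargetSummary[];
}

fetchActiveTargets('..').then(targets => {
  targets.forEach(t => console.log(`${t.scrapePool}: ${t.scrapeUrl} (${t.health})`));
});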
@@ -0,0 +1,27 @@
import { useLocalStorage } from './useLocalStorage';
import { renderHook, act } from '@testing-library/react-hooks';

describe('useLocalStorage', () => {
  it('returns the initialState', () => {
    const initialState = { a: 1, b: 2 };
    const { result } = renderHook(() => useLocalStorage('mystorage', initialState));
    expect(result.current[0]).toEqual(initialState);
  });
  it('stores the initialState as serialized json in localstorage', () => {
    const key = 'mystorage';
    const initialState = { a: 1, b: 2 };
    renderHook(() => useLocalStorage(key, initialState));
    expect(localStorage.getItem(key)).toEqual(JSON.stringify(initialState));
  });
  it('returns a setValue function that can reset local storage', () => {
    const key = 'mystorage';
    const initialState = { a: 1, b: 2 };
    const { result } = renderHook(() => useLocalStorage(key, initialState));
    const newValue = { a: 2, b: 5 };
    act(() => {
      result.current[1](newValue);
    });
    expect(result.current[0]).toEqual(newValue);
    expect(localStorage.getItem(key)).toEqual(JSON.stringify(newValue));
  });
});
@@ -0,0 +1,13 @@
import { Dispatch, SetStateAction, useEffect, useState } from 'react';

export function useLocalStorage<S>(localStorageKey: string, initialState: S): [S, Dispatch<SetStateAction<S>>] {
  const localStorageState = JSON.parse(localStorage.getItem(localStorageKey) as string);
  const [value, setValue] = useState(localStorageState || initialState);

  useEffect(() => {
    const serializedState = JSON.stringify(value);
    localStorage.setItem(localStorageKey, serializedState);
  }, [localStorageKey, value]);

  return [value, setValue];
}
@@ -1,15 +0,0 @@
import React, { FC } from 'react';
import { RouteComponentProps } from '@reach/router';
import PathPrefixProps from '../PathPrefixProps';
import { Alert } from 'reactstrap';

const Targets: FC<RouteComponentProps & PathPrefixProps> = ({ pathPrefix }) => (
  <>
    <h2>Targets</h2>
    <Alert color="warning">
      This page is still under construction. Please try it in the <a href={`${pathPrefix}/targets`}>Classic UI</a>.
    </Alert>
  </>
);

export default Targets;
@@ -0,0 +1,36 @@
import React from 'react';
import { shallow } from 'enzyme';
import { Badge, Alert } from 'reactstrap';
import EndpointLink from './EndpointLink';

describe('EndpointLink', () => {
  it('renders a simple anchor if the endpoint has no query params', () => {
    const endpoint = 'http://100.104.208.71:15090/stats/prometheus';
    const endpointLink = shallow(<EndpointLink endpoint={endpoint} />);
    const anchor = endpointLink.find('a');
    expect(anchor.prop('href')).toEqual(endpoint);
    expect(anchor.children().text()).toEqual(endpoint);
    expect(endpointLink.find('br')).toHaveLength(0);
  });

  it('renders an anchor targeting endpoint but with query param labels if the endpoint has query params', () => {
    const endpoint = 'http://100.99.128.71:9115/probe?module=http_2xx&target=http://some-service';
    const endpointLink = shallow(<EndpointLink endpoint={endpoint} />);
    const anchor = endpointLink.find('a');
    const badges = endpointLink.find(Badge);
    expect(anchor.prop('href')).toEqual(endpoint);
    expect(anchor.children().text()).toEqual('http://100.99.128.71:9115/probe');
    expect(endpointLink.find('br')).toHaveLength(1);
    expect(badges).toHaveLength(2);
    const moduleLabel = badges.filterWhere(badge => badge.hasClass('module'));
    expect(moduleLabel.children().text()).toEqual('module="http_2xx"');
    const targetLabel = badges.filterWhere(badge => badge.hasClass('target'));
    expect(targetLabel.children().text()).toEqual('target="http://some-service"');
  });

  it('renders an alert if url is invalid', () => {
    const endpointLink = shallow(<EndpointLink endpoint={'afdsacas'} />);
    const err = endpointLink.find(Alert);
    expect(err.render().text()).toEqual('Error: Invalid URL');
  });
});
@@ -0,0 +1,38 @@
import React, { FC } from 'react';
import { Badge, Alert } from 'reactstrap';

export interface EndpointLinkProps {
  endpoint: string;
}

const EndpointLink: FC<EndpointLinkProps> = ({ endpoint }) => {
  let url: URL;
  try {
    url = new URL(endpoint);
  } catch (e) {
    return (
      <Alert color="danger">
        <strong>Error:</strong> {e.message}
      </Alert>
    );
  }

  const { host, pathname, protocol, searchParams }: URL = url;
  const params = Array.from(searchParams.entries());

  return (
    <>
      <a href={endpoint}>{`${protocol}//${host}${pathname}`}</a>
      {params.length > 0 ? <br /> : null}
      {params.map(([labelName, labelValue]: [string, string]) => {
        return (
          <Badge color="primary" className={`mr-1 ${labelName}`} key={labelName}>
            {`${labelName}="${labelValue}"`}
          </Badge>
        );
      })}
    </>
  );
};

export default EndpointLink;
@@ -0,0 +1,4 @@
.btn {
  margin-top: 0.6em;
  margin-bottom: 15px;
}
@@ -0,0 +1,45 @@
import React, { Component } from 'react';
import { shallow, ShallowWrapper } from 'enzyme';
import { Button, ButtonGroup } from 'reactstrap';
import Filter, { FilterData, FilterProps } from './Filter';
import sinon, { SinonSpy } from 'sinon';

describe('Filter', () => {
  const initialState: FilterData = { showHealthy: true, showUnhealthy: true };
  let setFilter: SinonSpy;
  let filterWrapper: ShallowWrapper<FilterProps, Readonly<{}>, Component<{}, {}, Component>>;
  beforeEach(() => {
    setFilter = sinon.spy();
    filterWrapper = shallow(<Filter filter={initialState} setFilter={setFilter} />);
  });

  it('renders a button group', () => {
    expect(filterWrapper.find(ButtonGroup)).toHaveLength(1);
  });

  it('renders an all filter button that is active by default', () => {
    const btn = filterWrapper.find(Button).filterWhere((btn): boolean => btn.hasClass('all'));
    expect(btn.prop('active')).toBe(true);
    expect(btn.prop('color')).toBe('primary');
  });

  it('renders an unhealthy filter button that is inactive by default', () => {
    const btn = filterWrapper.find(Button).filterWhere((btn): boolean => btn.hasClass('unhealthy'));
    expect(btn.prop('active')).toBe(false);
    expect(btn.prop('color')).toBe('primary');
  });

  it('renders an all filter button which shows all targets', () => {
    const btn = filterWrapper.find(Button).filterWhere((btn): boolean => btn.hasClass('all'));
    btn.simulate('click');
    expect(setFilter.calledOnce).toBe(true);
    expect(setFilter.getCall(0).args[0]).toEqual({ showHealthy: true, showUnhealthy: true });
  });

  it('renders an unhealthy filter button which filters targets', () => {
    const btn = filterWrapper.find(Button).filterWhere((btn): boolean => btn.hasClass('unhealthy'));
    btn.simulate('click');
    expect(setFilter.calledOnce).toBe(true);
    expect(setFilter.getCall(0).args[0]).toEqual({ showHealthy: false, showUnhealthy: true });
  });
});
@@ -0,0 +1,39 @@
import React, { Dispatch, FC, SetStateAction } from 'react';
import { Button, ButtonGroup } from 'reactstrap';
import styles from './Filter.module.css';

export interface FilterData {
  showHealthy: boolean;
  showUnhealthy: boolean;
}

export interface FilterProps {
  filter: FilterData;
  setFilter: Dispatch<SetStateAction<FilterData>>;
}

const Filter: FC<FilterProps> = ({ filter, setFilter }) => {
  const { showHealthy } = filter;
  const btnProps = {
    all: {
      active: showHealthy,
      className: `all ${styles.btn}`,
      color: 'primary',
      onClick: (): void => setFilter({ ...filter, showHealthy: true }),
    },
    unhealthy: {
      active: !showHealthy,
      className: `unhealthy ${styles.btn}`,
      color: 'primary',
      onClick: (): void => setFilter({ ...filter, showHealthy: false }),
    },
  };
  return (
    <ButtonGroup>
      <Button {...btnProps.all}>All</Button>
      <Button {...btnProps.unhealthy}>Unhealthy</Button>
    </ButtonGroup>
  );
};

export default Filter;
@@ -0,0 +1,98 @@
import * as React from 'react';
import { mount, shallow, ReactWrapper } from 'enzyme';
import { act } from 'react-dom/test-utils';
import { Alert } from 'reactstrap';
import { sampleApiResponse } from './__testdata__/testdata';
import ScrapePoolList from './ScrapePoolList';
import ScrapePoolPanel from './ScrapePoolPanel';
import { Target } from './target';
import { FontAwesomeIcon } from '@fortawesome/react-fontawesome';
import { faSpinner } from '@fortawesome/free-solid-svg-icons';

describe('ScrapePoolList', () => {
  const defaultProps = {
    filter: { showHealthy: true, showUnhealthy: true },
    pathPrefix: '..',
  };

  beforeEach(() => {
    fetch.resetMocks();
  });

  describe('before data is returned', () => {
    const scrapePoolList = shallow(<ScrapePoolList {...defaultProps} />);
    const spinner = scrapePoolList.find(FontAwesomeIcon);

    it('renders a spinner', () => {
      expect(spinner.prop('icon')).toEqual(faSpinner);
      expect(spinner.prop('spin')).toBe(true);
    });

    it('renders exactly one spinner', () => {
      expect(spinner).toHaveLength(1);
    });
  });

  describe('when data is returned', () => {
    let scrapePoolList: ReactWrapper;
    let mock: Promise<Response>;
    beforeEach(() => {
      // Tooltip requires DOM elements to exist. They do not in enzyme rendering so we must manually create them.
      const scrapePools: { [key: string]: number } = { blackbox: 3, node_exporter: 1, prometheus: 1 };
      Object.keys(scrapePools).forEach((pool: string): void => {
        Array.from(Array(scrapePools[pool]).keys()).forEach((idx: number): void => {
          const div = document.createElement('div');
          div.id = `series-labels-${pool}-${idx}`;
          document.body.appendChild(div);
        });
      });
      mock = fetch.mockResponse(JSON.stringify(sampleApiResponse));
    });

    it('renders a table', async () => {
      await act(async () => {
        scrapePoolList = mount(<ScrapePoolList {...defaultProps} />);
      });
      scrapePoolList.update();
      expect(mock).toHaveBeenCalledWith('../api/v1/targets?state=active', undefined);
      const panels = scrapePoolList.find(ScrapePoolPanel);
      expect(panels).toHaveLength(3);
      const activeTargets: Target[] = sampleApiResponse.data.activeTargets as Target[];
      activeTargets.forEach(({ scrapePool }: Target) => {
        const panel = scrapePoolList.find(ScrapePoolPanel).filterWhere(panel => panel.prop('scrapePool') === scrapePool);
        expect(panel).toHaveLength(1);
      });
    });

    it('filters by health', async () => {
      const props = {
        ...defaultProps,
        filter: { showHealthy: false, showUnhealthy: true },
      };
      await act(async () => {
        scrapePoolList = mount(<ScrapePoolList {...props} />);
      });
      scrapePoolList.update();
      expect(mock).toHaveBeenCalledWith('../api/v1/targets?state=active', undefined);
      const panels = scrapePoolList.find(ScrapePoolPanel);
      expect(panels).toHaveLength(0);
    });
  });

  describe('when an error is returned', () => {
    it('displays an alert', async () => {
      const mock = fetch.mockReject(new Error('Error fetching targets'));

      let scrapePoolList: ReactWrapper;
      await act(async () => {
        scrapePoolList = mount(<ScrapePoolList {...defaultProps} />);
      });
      scrapePoolList.update();

      expect(mock).toHaveBeenCalledWith('../api/v1/targets?state=active', undefined);
      const alert = scrapePoolList.find(Alert);
      expect(alert.prop('color')).toBe('danger');
      expect(alert.text()).toContain('Error fetching targets');
    });
  });
});
@@ -0,0 +1,55 @@
import React, { FC } from 'react';
import { FilterData } from './Filter';
import { useFetch } from '../../utils/useFetch';
import { ScrapePool, groupTargets } from './target';
import ScrapePoolPanel from './ScrapePoolPanel';
import PathPrefixProps from '../../PathPrefixProps';
import { FontAwesomeIcon } from '@fortawesome/react-fontawesome';
import { faSpinner } from '@fortawesome/free-solid-svg-icons';
import { Alert } from 'reactstrap';

interface ScrapePoolListProps {
  filter: FilterData;
}

const filterByHealth = ({ upCount, targets }: ScrapePool, { showHealthy, showUnhealthy }: FilterData): boolean => {
  const isHealthy = upCount === targets.length;
  return (isHealthy && showHealthy) || (!isHealthy && showUnhealthy);
};

const ScrapePoolList: FC<ScrapePoolListProps & PathPrefixProps> = ({ filter, pathPrefix }) => {
  const { response, error } = useFetch(`${pathPrefix}/api/v1/targets?state=active`);

  if (error) {
    return (
      <Alert color="danger">
        <strong>Error fetching targets:</strong> {error.message}
      </Alert>
    );
  } else if (response && response.status !== 'success') {
    return (
      <Alert color="danger">
        <strong>Error fetching targets:</strong> {response.status}
      </Alert>
    );
  } else if (response && response.data) {
    const { activeTargets } = response.data;
    const targetGroups = groupTargets(activeTargets);
    return (
      <>
        {Object.keys(targetGroups)
          .filter((scrapePool: string) => filterByHealth(targetGroups[scrapePool], filter))
          .map((scrapePool: string) => {
            const targetGroupProps = {
              scrapePool,
              targetGroup: targetGroups[scrapePool],
            };
            return <ScrapePoolPanel key={scrapePool} {...targetGroupProps} />;
          })}
      </>
    );
  }
  return <FontAwesomeIcon icon={faSpinner} spin />;
};

export default ScrapePoolList;
@@ -0,0 +1,43 @@
.container {
  margin-top: -12px;
}

.title {
  font-size: 20px;
  font-weight: bold;
  cursor: pointer;
}

.normal {
  composes: title;
}

.danger {
  composes: title;
  color: rgb(242, 65, 65);
}

.table {
  width: 100%;
}

.cell {
  height: auto;
  word-wrap: break-word;
  word-break: break-all;
}

.endpoint, .labels {
  composes: cell;
  width: 25%;
}

.state, .last-scrape {
  composes: cell;
  width: 10%;
}

.errors {
  composes: cell;
  width: 30%;
}
@@ -0,0 +1,142 @@
import React from 'react';
import { mount, shallow } from 'enzyme';
import { targetGroups } from './__testdata__/testdata';
import ScrapePoolPanel, { columns } from './ScrapePoolPanel';
import { Button, Collapse, Table, Badge } from 'reactstrap';
import { Target, getColor } from './target';
import EndpointLink from './EndpointLink';
import TargetLabels from './TargetLabels';

describe('ScrapePoolPanel', () => {
  const defaultProps = {
    scrapePool: 'blackbox',
    targetGroup: targetGroups.blackbox,
  };
  const scrapePoolPanel = shallow(<ScrapePoolPanel {...defaultProps} />);

  it('renders a container', () => {
    const div = scrapePoolPanel.find('div').filterWhere(elem => elem.hasClass('container'));
    expect(div).toHaveLength(1);
  });

  describe('Header', () => {
    it('renders an h3', () => {
      expect(scrapePoolPanel.find('h3')).toHaveLength(1);
    });

    it('renders an anchor with up count and danger color if upCount < targetsCount', () => {
      const anchor = scrapePoolPanel.find('a');
      expect(anchor).toHaveLength(1);
      expect(anchor.prop('id')).toEqual('pool-blackbox');
      expect(anchor.prop('href')).toEqual('#pool-blackbox');
      expect(anchor.text()).toEqual('blackbox (2/3 up)');
      expect(anchor.prop('className')).toEqual('danger');
    });

    it('renders an anchor with up count and normal color if upCount == targetsCount', () => {
      const props = {
        scrapePool: 'prometheus',
        targetGroup: targetGroups.prometheus,
      };
      const scrapePoolPanel = shallow(<ScrapePoolPanel {...props} />);
      const anchor = scrapePoolPanel.find('a');
      expect(anchor).toHaveLength(1);
      expect(anchor.prop('id')).toEqual('pool-prometheus');
      expect(anchor.prop('href')).toEqual('#pool-prometheus');
      expect(anchor.text()).toEqual('prometheus (1/1 up)');
      expect(anchor.prop('className')).toEqual('normal');
    });

    it('renders a show less btn if expanded', () => {
      const btn = scrapePoolPanel.find(Button);
      expect(btn).toHaveLength(1);
      expect(btn.prop('color')).toEqual('primary');
      expect(btn.prop('size')).toEqual('xs');
      expect(btn.render().text()).toEqual('show less');
    });

    it('renders a show more btn if collapsed', () => {
      const props = {
        scrapePool: 'prometheus',
        targetGroup: targetGroups.prometheus,
      };
      const div = document.createElement('div');
      div.id = `series-labels-prometheus-0`;
      document.body.appendChild(div);
      const scrapePoolPanel = mount(<ScrapePoolPanel {...props} />);

      const btn = scrapePoolPanel.find(Button);
      btn.simulate('click');
      expect(btn.render().text()).toEqual('show more');
      const collapse = scrapePoolPanel.find(Collapse);
      expect(collapse.prop('isOpen')).toBe(false);
    });
  });

  it('renders a Collapse component', () => {
    const collapse = scrapePoolPanel.find(Collapse);
    expect(collapse.prop('isOpen')).toBe(true);
  });

  describe('Table', () => {
    it('renders a table', () => {
      const table = scrapePoolPanel.find(Table);
      const headers = table.find('th');
      expect(table).toHaveLength(1);
      expect(headers).toHaveLength(6);
      columns.forEach(col => {
        expect(headers.contains(col));
      });
    });

    describe('for each target', () => {
      const table = scrapePoolPanel.find(Table);
      defaultProps.targetGroup.targets.forEach(
        ({ discoveredLabels, labels, scrapeUrl, lastError, health }: Target, idx: number) => {
          const row = table.find('tr').at(idx + 1);

          it('renders an EndpointLink with the scrapeUrl', () => {
            const link = row.find(EndpointLink);
            expect(link).toHaveLength(1);
            expect(link.prop('endpoint')).toEqual(scrapeUrl);
          });

          it('renders a badge for health', () => {
            const td = row.find('td').filterWhere(elem => Boolean(elem.hasClass('state')));
            const badge = td.find(Badge);
            expect(badge).toHaveLength(1);
            expect(badge.prop('color')).toEqual(getColor(health));
            expect(badge.children().text()).toEqual(health.toUpperCase());
          });

          it('renders series labels', () => {
            const targetLabels = row.find(TargetLabels);
            expect(targetLabels).toHaveLength(1);
            expect(targetLabels.prop('discoveredLabels')).toEqual(discoveredLabels);
            expect(targetLabels.prop('labels')).toEqual(labels);
          });

          it('renders last scrape time', () => {
            const lastScrapeCell = row.find('td').filterWhere(elem => Boolean(elem.hasClass('last-scrape')));
            expect(lastScrapeCell).toHaveLength(1);
          });

          it('renders last scrape duration', () => {
            const lastScrapeCell = row.find('td').filterWhere(elem => Boolean(elem.hasClass('scrape-duration')));
            expect(lastScrapeCell).toHaveLength(1);
          });

          it('renders a badge for Errors', () => {
            const td = row.find('td').filterWhere(elem => Boolean(elem.hasClass('errors')));
            const badge = td.find(Badge);
            expect(badge).toHaveLength(lastError ? 1 : 0);
            if (lastError) {
              expect(badge.prop('color')).toEqual('danger');
              expect(badge.children().text()).toEqual(lastError);
            }
          });
        }
      );
    });
  });
});
@@ -0,0 +1,95 @@
import React, { FC } from 'react';
import { ScrapePool, getColor } from './target';
import { Button, Collapse, Table, Badge } from 'reactstrap';
import styles from './ScrapePoolPanel.module.css';
import { Target } from './target';
import EndpointLink from './EndpointLink';
import TargetLabels from './TargetLabels';
import { formatRelative, humanizeDuration } from '../../utils/timeFormat';
import { now } from 'moment';
import { useLocalStorage } from '../../hooks/useLocalStorage';

interface PanelProps {
  scrapePool: string;
  targetGroup: ScrapePool;
}

export const columns = ['Endpoint', 'State', 'Labels', 'Last Scrape', 'Scrape Duration', 'Error'];

const ScrapePoolPanel: FC<PanelProps> = ({ scrapePool, targetGroup }) => {
  const [{ expanded }, setOptions] = useLocalStorage(`targets-${scrapePool}-expanded`, { expanded: true });
  const modifier = targetGroup.upCount < targetGroup.targets.length ? 'danger' : 'normal';
  const id = `pool-${scrapePool}`;
  const anchorProps = {
    href: `#${id}`,
    id,
  };
  const btnProps = {
    children: `show ${expanded ? 'less' : 'more'}`,
    color: 'primary',
    onClick: (): void => setOptions({ expanded: !expanded }),
    size: 'xs',
    style: {
      padding: '0.3em 0.3em 0.25em 0.3em',
      fontSize: '0.375em',
      marginLeft: '1em',
      verticalAlign: 'baseline',
    },
  };

  return (
    <div className={styles.container}>
      <h3>
        <a className={styles[modifier]} {...anchorProps}>
          {`${scrapePool} (${targetGroup.upCount}/${targetGroup.targets.length} up)`}
        </a>
        <Button {...btnProps} />
      </h3>
      <Collapse isOpen={expanded}>
        <Table className={styles.table} size="sm" bordered hover striped>
          <thead>
            <tr key="header">
              {columns.map(column => (
                <th key={column}>{column}</th>
              ))}
            </tr>
          </thead>
          <tbody>
            {targetGroup.targets.map((target: Target, idx: number) => {
              const {
                discoveredLabels,
                labels,
                scrapePool,
                scrapeUrl,
                lastError,
                lastScrape,
                lastScrapeDuration,
                health,
              } = target;
              const color = getColor(health);

              return (
                <tr key={scrapeUrl}>
                  <td className={styles.endpoint}>
                    <EndpointLink endpoint={scrapeUrl} />
                  </td>
                  <td className={styles.state}>
                    <Badge color={color}>{health.toUpperCase()}</Badge>
                  </td>
                  <td className={styles.labels}>
                    <TargetLabels discoveredLabels={discoveredLabels} labels={labels} scrapePool={scrapePool} idx={idx} />
                  </td>
                  <td className={styles['last-scrape']}>{formatRelative(lastScrape, now())}</td>
                  <td className={styles['scrape-duration']}>{humanizeDuration(lastScrapeDuration * 1000)}</td>
                  <td className={styles.errors}>{lastError ? <Badge color={color}>{lastError}</Badge> : null}</td>
                </tr>
              );
            })}
          </tbody>
        </Table>
      </Collapse>
    </div>
  );
};

export default ScrapePoolPanel;
@@ -0,0 +1,3 @@
.discovered {
  white-space: nowrap;
}
@@ -0,0 +1,50 @@
import * as React from 'react';
import { shallow } from 'enzyme';
import TargetLabels from './TargetLabels';
import { Tooltip, Badge } from 'reactstrap';
import toJson from 'enzyme-to-json';

describe('targetLabels', () => {
  const defaultProps = {
    discoveredLabels: {
      __address__: 'localhost:9100',
      __metrics_path__: '/metrics',
      __scheme__: 'http',
      job: 'node_exporter',
    },
    labels: {
      instance: 'localhost:9100',
      job: 'node_exporter',
      foo: 'bar',
    },
    idx: 1,
    scrapePool: 'cortex/node-exporter_group/0',
  };
  const targetLabels = shallow(<TargetLabels {...defaultProps} />);

  it('renders a div of series labels', () => {
    const div = targetLabels.find('div').filterWhere(elem => elem.hasClass('series-labels-container'));
    expect(div).toHaveLength(1);
    expect(div.prop('id')).toEqual('series-labels-cortex/node-exporter_group/0-1');
  });

  it('wraps each label in a label badge', () => {
    const l: { [key: string]: string } = defaultProps.labels;
    Object.keys(l).forEach((labelName: string): void => {
      const badge = targetLabels.find(Badge).filterWhere(badge => badge.hasClass(labelName));
      expect(badge.children().text()).toEqual(`${labelName}="${l[labelName]}"`);
    });
    expect(targetLabels.find(Badge)).toHaveLength(3);
  });

  it('renders a tooltip for discovered labels', () => {
    const tooltip = targetLabels.find(Tooltip);
    expect(tooltip).toHaveLength(1);
    expect(tooltip.prop('isOpen')).toBe(false);
    expect(tooltip.prop('target')).toEqual('series-labels-cortex/node-exporter_group/0-1');
  });

  it('renders discovered labels', () => {
    expect(toJson(targetLabels)).toMatchSnapshot();
  });
});
@@ -0,0 +1,48 @@
import React, { FC, Fragment, useState } from 'react';
import { Badge, Tooltip } from 'reactstrap';
import styles from './TargetLabels.module.css';

interface Labels {
  [key: string]: string;
}

export interface TargetLabelsProps {
  discoveredLabels: Labels;
  labels: Labels;
  idx: number;
  scrapePool: string;
}

const formatLabels = (labels: Labels): string[] => Object.keys(labels).map(key => `${key}="${labels[key]}"`);

const TargetLabels: FC<TargetLabelsProps> = ({ discoveredLabels, labels, idx, scrapePool }) => {
  const [tooltipOpen, setTooltipOpen] = useState(false);

  const toggle = (): void => setTooltipOpen(!tooltipOpen);
  const id = `series-labels-${scrapePool}-${idx}`;

  return (
    <>
      <div id={id} className="series-labels-container">
        {Object.keys(labels).map(labelName => {
          return (
            <Badge color="primary" className={`mr-1 ${labelName}`} key={labelName}>
              {`${labelName}="${labels[labelName]}"`}
            </Badge>
          );
        })}
      </div>
      <Tooltip isOpen={tooltipOpen} target={id} toggle={toggle} style={{ maxWidth: 'none', textAlign: 'left' }}>
        <b>Before relabeling:</b>
        {formatLabels(discoveredLabels).map((s: string, idx: number) => (
          <Fragment key={idx}>
            <br />
            <span className={styles.discovered}>{s}</span>
          </Fragment>
        ))}
      </Tooltip>
    </>
  );
};

export default TargetLabels;
@@ -0,0 +1,33 @@
import React from 'react';
import { shallow } from 'enzyme';
import Targets from './Targets';
import Filter from './Filter';
import ScrapePoolList from './ScrapePoolList';

describe('Targets', () => {
  const defaultProps = {
    pathPrefix: '..',
  };
  const targets = shallow(<Targets {...defaultProps} />);
  describe('Header', () => {
    const h2 = targets.find('h2');
    it('renders a header', () => {
      expect(h2.text()).toEqual('Targets');
    });
    it('renders exactly one header', () => {
      const h2 = targets.find('h2');
      expect(h2).toHaveLength(1);
    });
  });
  it('renders a filter', () => {
    const filter = targets.find(Filter);
    expect(filter).toHaveLength(1);
    expect(filter.prop('filter')).toEqual({ showHealthy: true, showUnhealthy: true });
  });
  it('renders a scrape pool list', () => {
    const scrapePoolList = targets.find(ScrapePoolList);
    expect(scrapePoolList).toHaveLength(1);
    expect(scrapePoolList.prop('filter')).toEqual({ showHealthy: true, showUnhealthy: true });
    expect(scrapePoolList.prop('pathPrefix')).toEqual(defaultProps.pathPrefix);
  });
});
@@ -0,0 +1,22 @@
import React, { FC } from 'react';
import { RouteComponentProps } from '@reach/router';
import Filter from './Filter';
import ScrapePoolList from './ScrapePoolList';
import PathPrefixProps from '../../PathPrefixProps';
import { useLocalStorage } from '../../hooks/useLocalStorage';

const Targets: FC<RouteComponentProps & PathPrefixProps> = ({ pathPrefix }) => {
  const [filter, setFilter] = useLocalStorage('targets-page-filter', { showHealthy: true, showUnhealthy: true });
  const filterProps = { filter, setFilter };
  const scrapePoolListProps = { filter, pathPrefix };

  return (
    <>
      <h2>Targets</h2>
      <Filter {...filterProps} />
      <ScrapePoolList {...scrapePoolListProps} />
    </>
  );
};

export default Targets;
@@ -0,0 +1,81 @@
// Jest Snapshot v1, https://goo.gl/fbAQLP

exports[`targetLabels renders discovered labels 1`] = `
<Fragment>
  <div
    className="series-labels-container"
    id="series-labels-cortex/node-exporter_group/0-1"
  >
    <Badge
      className="mr-1 instance"
      color="primary"
      key="instance"
      pill={false}
      tag="span"
    >
      instance="localhost:9100"
    </Badge>
    <Badge
      className="mr-1 job"
      color="primary"
      key="job"
      pill={false}
      tag="span"
    >
      job="node_exporter"
    </Badge>
    <Badge
      className="mr-1 foo"
      color="primary"
      key="foo"
      pill={false}
      tag="span"
    >
      foo="bar"
    </Badge>
  </div>
  <Tooltip
    autohide={true}
    isOpen={false}
    placement="top"
    placementPrefix="bs-tooltip"
    style={
      Object {
        "maxWidth": "none",
        "textAlign": "left",
      }
    }
    target="series-labels-cortex/node-exporter_group/0-1"
    toggle={[Function]}
    trigger="click hover focus"
  >
    <b>
      Before relabeling:
    </b>
    <br />
    <span
      className="discovered"
    >
      __address__="localhost:9100"
    </span>
    <br />
    <span
      className="discovered"
    >
      __metrics_path__="/metrics"
    </span>
    <br />
    <span
      className="discovered"
    >
      __scheme__="http"
    </span>
    <br />
    <span
      className="discovered"
    >
      job="node_exporter"
    </span>
  </Tooltip>
</Fragment>
`;
@@ -0,0 +1,215 @@
/* eslint @typescript-eslint/camelcase: 0 */

import { ScrapePools, Target, Labels } from '../target';

export const targetGroups: ScrapePools = Object.freeze({
  blackbox: {
    upCount: 2,
    targets: [
      {
        discoveredLabels: {
          __address__: 'http://prometheus.io',
          __metrics_path__: '/probe',
          __param_module: 'http_2xx',
          __scheme__: 'http',
          job: 'blackbox',
        },
        labels: {
          instance: 'http://prometheus.io',
          job: 'blackbox',
        },
        scrapePool: 'blackbox',
        scrapeUrl: 'http://127.0.0.1:9115/probe?module=http_2xx&target=http%3A%2F%2Fprometheus.io',
        lastError: '',
        lastScrape: '2019-11-04T11:52:14.759299-07:00',
        lastScrapeDuration: 36560147,
        health: 'up',
      },
      {
        discoveredLabels: {
          __address__: 'https://prometheus.io',
          __metrics_path__: '/probe',
          __param_module: 'http_2xx',
          __scheme__: 'http',
          job: 'blackbox',
        },
        labels: {
          instance: 'https://prometheus.io',
          job: 'blackbox',
        },
        scrapePool: 'blackbox',
        scrapeUrl: 'http://127.0.0.1:9115/probe?module=http_2xx&target=https%3A%2F%2Fprometheus.io',
        lastError: '',
        lastScrape: '2019-11-04T11:52:24.731096-07:00',
        lastScrapeDuration: 49448763,
        health: 'up',
      },
      {
        discoveredLabels: {
          __address__: 'http://example.com:8080',
          __metrics_path__: '/probe',
          __param_module: 'http_2xx',
          __scheme__: 'http',
          job: 'blackbox',
        },
        labels: {
          instance: 'http://example.com:8080',
          job: 'blackbox',
        },
        scrapePool: 'blackbox',
        scrapeUrl: 'http://127.0.0.1:9115/probe?module=http_2xx&target=http%3A%2F%2Fexample.com%3A8080',
        lastError: '',
        lastScrape: '2019-11-04T11:52:13.516654-07:00',
        lastScrapeDuration: 120916592,
        health: 'down',
      },
    ],
  },
  node_exporter: {
    upCount: 1,
    targets: [
      {
        discoveredLabels: {
          __address__: 'localhost:9100',
          __metrics_path__: '/metrics',
          __scheme__: 'http',
          job: 'node_exporter',
        },
        labels: {
          instance: 'localhost:9100',
          job: 'node_exporter',
        },
        scrapePool: 'node_exporter',
        scrapeUrl: 'http://localhost:9100/metrics',
        lastError: '',
        lastScrape: '2019-11-04T11:52:14.145703-07:00',
        lastScrapeDuration: 3842307,
        health: 'up',
      },
    ],
  },
  prometheus: {
    upCount: 1,
    targets: [
      {
        discoveredLabels: {
          __address__: 'localhost:9090',
          __metrics_path__: '/metrics',
          __scheme__: 'http',
          job: 'prometheus',
        },
        labels: {
          instance: 'localhost:9090',
          job: 'prometheus',
        },
        scrapePool: 'prometheus',
        scrapeUrl: 'http://localhost:9090/metrics',
        lastError: '',
        lastScrape: '2019-11-04T11:52:18.479731-07:00',
        lastScrapeDuration: 4050976,
        health: 'up',
      },
    ],
  },
});

export const sampleApiResponse = Object.freeze({
  status: 'success',
  data: {
    activeTargets: [
      {
        discoveredLabels: {
          __address__: 'http://prometheus.io',
          __metrics_path__: '/probe',
          __param_module: 'http_2xx',
          __scheme__: 'http',
          job: 'blackbox',
        },
        labels: {
          instance: 'http://prometheus.io',
          job: 'blackbox',
        },
        scrapePool: 'blackbox',
        scrapeUrl: 'http://127.0.0.1:9115/probe?module=http_2xx&target=http%3A%2F%2Fprometheus.io',
        lastError: '',
        lastScrape: '2019-11-04T11:52:14.759299-07:00',
        lastScrapeDuration: 36560147,
        health: 'up',
      },
      {
        discoveredLabels: {
          __address__: 'https://prometheus.io',
          __metrics_path__: '/probe',
          __param_module: 'http_2xx',
          __scheme__: 'http',
          job: 'blackbox',
        },
        labels: {
          instance: 'https://prometheus.io',
          job: 'blackbox',
        },
        scrapePool: 'blackbox',
        scrapeUrl: 'http://127.0.0.1:9115/probe?module=http_2xx&target=https%3A%2F%2Fprometheus.io',
        lastError: '',
        lastScrape: '2019-11-04T11:52:24.731096-07:00',
        lastScrapeDuration: 49448763,
        health: 'up',
      },
      {
        discoveredLabels: {
          __address__: 'http://example.com:8080',
          __metrics_path__: '/probe',
          __param_module: 'http_2xx',
          __scheme__: 'http',
          job: 'blackbox',
        },
        labels: {
          instance: 'http://example.com:8080',
          job: 'blackbox',
        },
        scrapePool: 'blackbox',
        scrapeUrl: 'http://127.0.0.1:9115/probe?module=http_2xx&target=http%3A%2F%2Fexample.com%3A8080',
        lastError: '',
        lastScrape: '2019-11-04T11:52:13.516654-07:00',
        lastScrapeDuration: 120916592,
        health: 'up',
      },
      {
        discoveredLabels: {
          __address__: 'localhost:9100',
          __metrics_path__: '/metrics',
          __scheme__: 'http',
          job: 'node_exporter',
        },
        labels: {
          instance: 'localhost:9100',
          job: 'node_exporter',
        },
        scrapePool: 'node_exporter',
        scrapeUrl: 'http://localhost:9100/metrics',
        lastError: '',
        lastScrape: '2019-11-04T11:52:14.145703-07:00',
        lastScrapeDuration: 3842307,
        health: 'up',
      },
      {
        discoveredLabels: {
          __address__: 'localhost:9090',
          __metrics_path__: '/metrics',
          __scheme__: 'http',
          job: 'prometheus',
        },
        labels: {
          instance: 'localhost:9090',
          job: 'prometheus',
        },
        scrapePool: 'prometheus',
        scrapeUrl: 'http://localhost:9090/metrics',
        lastError: '',
        lastScrape: '2019-11-04T11:52:18.479731-07:00',
        lastScrapeDuration: 4050976,
        health: 'up',
      },
    ],
  },
});
@@ -0,0 +1,45 @@
/* eslint @typescript-eslint/camelcase: 0 */

import { sampleApiResponse } from './__testdata__/testdata';
import { groupTargets, Target, ScrapePools, getColor } from './target';

describe('groupTargets', () => {
  const targets: Target[] = sampleApiResponse.data.activeTargets as Target[];
  const targetGroups: ScrapePools = groupTargets(targets);

  it('groups a list of targets by scrape job', () => {
    ['blackbox', 'prometheus', 'node_exporter'].forEach(scrapePool => {
      expect(Object.keys(targetGroups)).toContain(scrapePool);
    });
    Object.keys(targetGroups).forEach((scrapePool: string): void => {
      const ts: Target[] = targetGroups[scrapePool].targets;
      ts.forEach((t: Target) => {
        expect(t.scrapePool).toEqual(scrapePool);
      });
    });
  });

  it('adds upCount during aggregation', () => {
    const testCases: { [key: string]: number } = { blackbox: 3, prometheus: 1, node_exporter: 1 };
    Object.keys(testCases).forEach((scrapePool: string): void => {
      expect(targetGroups[scrapePool].upCount).toEqual(testCases[scrapePool]);
    });
  });
});

describe('getColor', () => {
  const testCases: { color: string; status: string }[] = [
    { color: 'danger', status: 'down' },
    { color: 'danger', status: 'DOWN' },
    { color: 'warning', status: 'unknown' },
    { color: 'warning', status: 'foo' },
    { color: 'success', status: 'up' },
    { color: 'success', status: 'Up' },
  ];
  testCases.forEach(({ color, status }) => {
    it(`returns ${color} for ${status} status`, () => {
      expect(getColor(status)).toEqual(color);
    });
  });
});
@@ -0,0 +1,49 @@
export interface Labels {
  [key: string]: string;
}

export interface Target {
  discoveredLabels: Labels;
  labels: Labels;
  scrapePool: string;
  scrapeUrl: string;
  lastError: string;
  lastScrape: string;
  lastScrapeDuration: number;
  health: string;
}

export interface ScrapePool {
  upCount: number;
  targets: Target[];
}

export interface ScrapePools {
  [scrapePool: string]: ScrapePool;
}

export const groupTargets = (targets: Target[]): ScrapePools =>
  targets.reduce((pools: ScrapePools, target: Target) => {
    const { health, scrapePool } = target;
    const up = health.toLowerCase() === 'up' ? 1 : 0;
    if (!pools[scrapePool]) {
      pools[scrapePool] = {
        upCount: 0,
        targets: [],
      };
    }
    pools[scrapePool].targets.push(target);
    pools[scrapePool].upCount += up;
    return pools;
  }, {});

export const getColor = (health: string): string => {
  switch (health.toLowerCase()) {
    case 'up':
      return 'success';
    case 'down':
      return 'danger';
    default:
      return 'warning';
  }
};