mirror of https://github.com/grafana/grafana
Explore: Use PanelQueryState to handle querying (#18694)
* WIP: inital POC
* Wip: Moving forward
* Wip
* Refactor: Makes loading indicator work for Prometheus
* Refactor: Reverts prom observable queries because they did not work for multiple targets
* Refactor: Transforms all epics into thunks
* Fix: Fixes scanning
* Fix: Fixes so that Instant and TimeSeries Prom query loads in parallel
* Fix: Fixes negation logic error
* Propagate errors in stream events, and close streams
parent f942fecc52
commit 5ca643f2ba
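The squashed messages above mention refactoring the Explore query epics into thunks. As a point of reference only, here is a minimal sketch of the general epic-to-thunk shape; the state shape, action types, and dispatch flow below are illustrative assumptions, not code from this commit.

// Illustrative sketch only: general epic -> thunk conversion shape.
// The state shape and action types here are hypothetical.
import { AnyAction } from 'redux';
import { ThunkAction } from 'redux-thunk';

interface ExploreItemState {
  datasourceError?: { message: string };
  queries: Array<{ refId: string }>;
}
interface StoreState {
  explore: Record<string, ExploreItemState>;
}

type ThunkResult<R> = ThunkAction<R, StoreState, undefined, AnyAction>;

// Roughly the runQueriesEpic behaviour expressed as a thunk: read state,
// bail out early on a faulty data source, otherwise dispatch follow-ups directly.
export const runQueries = (exploreId: string): ThunkResult<void> => (dispatch, getState) => {
  const { datasourceError, queries } = getState().explore[exploreId];

  if (datasourceError) {
    return; // do not run queries while the data source is in a faulty state
  }

  if (queries.length === 0) {
    dispatch({ type: 'explore/clearQueries', payload: { exploreId } });
    return;
  }

  dispatch({ type: 'explore/runQueriesBatch', payload: { exploreId } });
};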
@@ -1,25 +0,0 @@
import { Epic } from 'redux-observable';
import { map, throttleTime } from 'rxjs/operators';
import { LoadingState } from '@grafana/data';

import { StoreState } from 'app/types';
import { ActionOf } from '../../../../core/redux/actionCreatorFactory';
import { limitMessageRatePayloadAction, LimitMessageRatePayload, processQueryResultsAction } from '../actionTypes';
import { EpicDependencies } from 'app/store/configureStore';

export const limitMessageRateEpic: Epic<ActionOf<any>, ActionOf<any>, StoreState, EpicDependencies> = action$ => {
  return action$.ofType(limitMessageRatePayloadAction.type).pipe(
    throttleTime(1),
    map((action: ActionOf<LimitMessageRatePayload>) => {
      const { exploreId, series, datasourceId } = action.payload;
      return processQueryResultsAction({
        exploreId,
        latency: 0,
        datasourceId,
        loadingState: LoadingState.Streaming,
        series: null,
        delta: series,
      });
    })
  );
};
@@ -1,67 +0,0 @@
import { mockExploreState } from 'test/mocks/mockExploreState';
import { epicTester } from 'test/core/redux/epicTester';
import { processQueryErrorsAction, queryFailureAction } from '../actionTypes';
import { processQueryErrorsEpic } from './processQueryErrorsEpic';

describe('processQueryErrorsEpic', () => {
  let originalConsoleError = console.error;

  beforeEach(() => {
    originalConsoleError = console.error;
    console.error = jest.fn();
  });

  afterEach(() => {
    console.error = originalConsoleError;
  });

  describe('when processQueryErrorsAction is dispatched', () => {
    describe('and datasourceInstance is the same', () => {
      describe('and the response is not cancelled', () => {
        it('then queryFailureAction is dispatched', () => {
          const { datasourceId, exploreId, state, eventBridge } = mockExploreState();
          const response = { message: 'Something went terribly wrong!' };

          epicTester(processQueryErrorsEpic, state)
            .whenActionIsDispatched(processQueryErrorsAction({ exploreId, datasourceId, response }))
            .thenResultingActionsEqual(queryFailureAction({ exploreId, response }));

          expect(console.error).toBeCalledTimes(1);
          expect(console.error).toBeCalledWith(response);
          expect(eventBridge.emit).toBeCalledTimes(1);
          expect(eventBridge.emit).toBeCalledWith('data-error', response);
        });
      });

      describe('and the response is cancelled', () => {
        it('then no actions are dispatched', () => {
          const { datasourceId, exploreId, state, eventBridge } = mockExploreState();
          const response = { cancelled: true, message: 'Something went terribly wrong!' };

          epicTester(processQueryErrorsEpic, state)
            .whenActionIsDispatched(processQueryErrorsAction({ exploreId, datasourceId, response }))
            .thenNoActionsWhereDispatched();

          expect(console.error).not.toBeCalled();
          expect(eventBridge.emit).not.toBeCalled();
        });
      });
    });

    describe('and datasourceInstance is not the same', () => {
      describe('and the response is not cancelled', () => {
        it('then no actions are dispatched', () => {
          const { exploreId, state, eventBridge } = mockExploreState();
          const response = { message: 'Something went terribly wrong!' };

          epicTester(processQueryErrorsEpic, state)
            .whenActionIsDispatched(processQueryErrorsAction({ exploreId, datasourceId: 'other id', response }))
            .thenNoActionsWhereDispatched();

          expect(console.error).not.toBeCalled();
          expect(eventBridge.emit).not.toBeCalled();
        });
      });
    });
  });
});
@@ -1,40 +0,0 @@
import { Epic } from 'redux-observable';
import { mergeMap } from 'rxjs/operators';
import { NEVER, of } from 'rxjs';

import { ActionOf } from 'app/core/redux/actionCreatorFactory';
import { StoreState } from 'app/types/store';
import { instanceOfDataQueryError } from 'app/core/utils/explore';
import { toDataQueryError } from 'app/features/dashboard/state/PanelQueryState';
import { processQueryErrorsAction, ProcessQueryErrorsPayload, queryFailureAction } from '../actionTypes';

export const processQueryErrorsEpic: Epic<ActionOf<any>, ActionOf<any>, StoreState> = (action$, state$) => {
  return action$.ofType(processQueryErrorsAction.type).pipe(
    mergeMap((action: ActionOf<ProcessQueryErrorsPayload>) => {
      const { exploreId, datasourceId } = action.payload;
      let { response } = action.payload;
      const { datasourceInstance, eventBridge } = state$.value.explore[exploreId];

      if (datasourceInstance.meta.id !== datasourceId || response.cancelled) {
        // Navigated away, queries did not matter
        return NEVER;
      }

      // For Angular editors
      eventBridge.emit('data-error', response);

      console.error(response); // To help finding problems with query syntax

      if (!instanceOfDataQueryError(response)) {
        response = toDataQueryError(response);
      }

      return of(
        queryFailureAction({
          exploreId,
          response,
        })
      );
    })
  );
};
@@ -1,119 +0,0 @@
import { mockExploreState } from 'test/mocks/mockExploreState';
import { epicTester, MOCKED_ABSOLUTE_RANGE } from 'test/core/redux/epicTester';
import {
  processQueryResultsAction,
  resetQueryErrorAction,
  querySuccessAction,
  scanStopAction,
  updateTimeRangeAction,
  runQueriesAction,
} from '../actionTypes';
import { DataFrame, LoadingState, toDataFrame } from '@grafana/data';
import { processQueryResultsEpic } from './processQueryResultsEpic';
import TableModel from 'app/core/table_model';

const testContext = () => {
  const serieA: DataFrame = toDataFrame({
    fields: [],
    refId: 'A',
  });
  const serieB: DataFrame = toDataFrame({
    fields: [],
    refId: 'B',
  });
  const series = [serieA, serieB];
  const latency = 0;
  const loadingState = LoadingState.Done;

  return {
    latency,
    series,
    loadingState,
  };
};

describe('processQueryResultsEpic', () => {
  describe('when processQueryResultsAction is dispatched', () => {
    describe('and datasourceInstance is the same', () => {
      describe('and explore is not scanning', () => {
        it('then resetQueryErrorAction and querySuccessAction are dispatched and eventBridge emits correct message', () => {
          const { datasourceId, exploreId, state, eventBridge } = mockExploreState();
          const { latency, series, loadingState } = testContext();
          const graphResult: any[] = [];
          const tableResult = new TableModel();
          const logsResult: any = null;

          epicTester(processQueryResultsEpic, state)
            .whenActionIsDispatched(
              processQueryResultsAction({ exploreId, datasourceId, loadingState, series, latency })
            )
            .thenResultingActionsEqual(
              resetQueryErrorAction({ exploreId, refIds: ['A', 'B'] }),
              querySuccessAction({ exploreId, loadingState, graphResult, tableResult, logsResult, latency })
            );

          expect(eventBridge.emit).toBeCalledTimes(1);
          expect(eventBridge.emit).toBeCalledWith('data-received', series);
        });
      });

      describe('and explore is scanning', () => {
        describe('and we have a result', () => {
          it('then correct actions are dispatched', () => {
            const { datasourceId, exploreId, state } = mockExploreState({ scanning: true });
            const { latency, series, loadingState } = testContext();
            const graphResult: any[] = [];
            const tableResult = new TableModel();
            const logsResult: any = null;

            epicTester(processQueryResultsEpic, state)
              .whenActionIsDispatched(
                processQueryResultsAction({ exploreId, datasourceId, loadingState, series, latency })
              )
              .thenResultingActionsEqual(
                resetQueryErrorAction({ exploreId, refIds: ['A', 'B'] }),
                querySuccessAction({ exploreId, loadingState, graphResult, tableResult, logsResult, latency }),
                scanStopAction({ exploreId })
              );
          });
        });

        describe('and we do not have a result', () => {
          it('then correct actions are dispatched', () => {
            const { datasourceId, exploreId, state } = mockExploreState({ scanning: true });
            const { latency, loadingState } = testContext();
            const graphResult: any[] = [];
            const tableResult = new TableModel();
            const logsResult: any = null;

            epicTester(processQueryResultsEpic, state)
              .whenActionIsDispatched(
                processQueryResultsAction({ exploreId, datasourceId, loadingState, series: [], latency })
              )
              .thenResultingActionsEqual(
                resetQueryErrorAction({ exploreId, refIds: [] }),
                querySuccessAction({ exploreId, loadingState, graphResult, tableResult, logsResult, latency }),
                updateTimeRangeAction({ exploreId, absoluteRange: MOCKED_ABSOLUTE_RANGE }),
                runQueriesAction({ exploreId })
              );
          });
        });
      });
    });

    describe('and datasourceInstance is not the same', () => {
      it('then no actions are dispatched and eventBridge does not emit message', () => {
        const { exploreId, state, eventBridge } = mockExploreState();
        const { series, loadingState } = testContext();

        epicTester(processQueryResultsEpic, state)
          .whenActionIsDispatched(
            processQueryResultsAction({ exploreId, datasourceId: 'other id', loadingState, series, latency: 0 })
          )
          .thenNoActionsWhereDispatched();

        expect(eventBridge.emit).not.toBeCalled();
      });
    });
  });
});
@@ -1,82 +0,0 @@
import _ from 'lodash';
import { Epic } from 'redux-observable';
import { mergeMap } from 'rxjs/operators';
import { NEVER } from 'rxjs';
import { LoadingState } from '@grafana/data';

import { ActionOf } from 'app/core/redux/actionCreatorFactory';
import { StoreState } from 'app/types/store';
import { getRefIds } from 'app/core/utils/explore';
import {
  processQueryResultsAction,
  ProcessQueryResultsPayload,
  querySuccessAction,
  resetQueryErrorAction,
  scanStopAction,
  updateTimeRangeAction,
  runQueriesAction,
} from '../actionTypes';
import { ResultProcessor } from '../../utils/ResultProcessor';

export const processQueryResultsEpic: Epic<ActionOf<any>, ActionOf<any>, StoreState> = (
  action$,
  state$,
  { getTimeZone, getShiftedTimeRange }
) => {
  return action$.ofType(processQueryResultsAction.type).pipe(
    mergeMap((action: ActionOf<ProcessQueryResultsPayload>) => {
      const { exploreId, datasourceId, latency, loadingState, series, delta } = action.payload;
      const { datasourceInstance, scanning, eventBridge } = state$.value.explore[exploreId];

      // If datasource already changed, results do not matter
      if (datasourceInstance.meta.id !== datasourceId) {
        return NEVER;
      }

      const result = series || delta || [];
      const replacePreviousResults = loadingState === LoadingState.Done && series && !delta ? true : false;
      const resultProcessor = new ResultProcessor(state$.value.explore[exploreId], replacePreviousResults, result);
      const graphResult = resultProcessor.getGraphResult();
      const tableResult = resultProcessor.getTableResult();
      const logsResult = resultProcessor.getLogsResult();
      const refIds = getRefIds(result);
      const actions: Array<ActionOf<any>> = [];

      // For Angular editors
      eventBridge.emit('data-received', resultProcessor.getRawData());

      // Clears any previous errors that now have a successful query, important so Angular editors are updated correctly
      actions.push(
        resetQueryErrorAction({
          exploreId,
          refIds,
        })
      );

      actions.push(
        querySuccessAction({
          exploreId,
          latency,
          loadingState,
          graphResult,
          tableResult,
          logsResult,
        })
      );

      // Keep scanning for results if this was the last scanning transaction
      if (scanning) {
        if (_.size(result) === 0) {
          const range = getShiftedTimeRange(-1, state$.value.explore[exploreId].range, getTimeZone(state$.value.user));
          actions.push(updateTimeRangeAction({ exploreId, absoluteRange: range }));
          actions.push(runQueriesAction({ exploreId }));
        } else {
          // We can stop scanning if we have a result
          actions.push(scanStopAction({ exploreId }));
        }
      }

      return actions;
    })
  );
};
@@ -1,425 +0,0 @@
import { mockExploreState } from 'test/mocks/mockExploreState';
import { epicTester } from 'test/core/redux/epicTester';
import { runQueriesBatchEpic } from './runQueriesBatchEpic';
import {
  runQueriesBatchAction,
  queryStartAction,
  historyUpdatedAction,
  processQueryResultsAction,
  processQueryErrorsAction,
  limitMessageRatePayloadAction,
  resetExploreAction,
  updateDatasourceInstanceAction,
  changeRefreshIntervalAction,
  clearQueriesAction,
  stateSaveAction,
} from '../actionTypes';
import { LoadingState, DataFrame, FieldType, DataFrameHelper } from '@grafana/data';
import { DataQueryRequest } from '@grafana/ui';

const testContext = () => {
  const series: DataFrame[] = [
    new DataFrameHelper({
      fields: [
        {
          name: 'Value',
          values: [],
        },
        {
          name: 'Time',
          type: FieldType.time,
          config: {
            unit: 'dateTimeAsIso',
          },
          values: [],
        },
      ],
      refId: 'A',
    }),
  ];
  const response = { data: series };

  return {
    response,
    series,
  };
};

describe('runQueriesBatchEpic', () => {
  let originalDateNow = Date.now;
  beforeEach(() => {
    originalDateNow = Date.now;
    Date.now = () => 1337;
  });

  afterEach(() => {
    Date.now = originalDateNow;
  });

  describe('when runQueriesBatchAction is dispatched', () => {
    describe('and query targets are not live', () => {
      describe('and query is successful', () => {
        it('then correct actions are dispatched', () => {
          const { response, series } = testContext();
          const { exploreId, state, history, datasourceId } = mockExploreState();

          epicTester(runQueriesBatchEpic, state)
            .whenActionIsDispatched(
              runQueriesBatchAction({ exploreId, queryOptions: { live: false, interval: '', maxDataPoints: 1980 } })
            )
            .whenQueryReceivesResponse(response)
            .thenResultingActionsEqual(
              queryStartAction({ exploreId }),
              historyUpdatedAction({ exploreId, history }),
              processQueryResultsAction({
                exploreId,
                delta: null,
                series,
                latency: 0,
                datasourceId,
                loadingState: LoadingState.Done,
              }),
              stateSaveAction()
            );
        });
      });

      describe('and query is not successful', () => {
        it('then correct actions are dispatched', () => {
          const error = {
            message: 'Error parsing line x',
          };
          const { exploreId, state, datasourceId } = mockExploreState();

          epicTester(runQueriesBatchEpic, state)
            .whenActionIsDispatched(
              runQueriesBatchAction({ exploreId, queryOptions: { live: false, interval: '', maxDataPoints: 1980 } })
            )
            .whenQueryThrowsError(error)
            .thenResultingActionsEqual(
              queryStartAction({ exploreId }),
              processQueryErrorsAction({ exploreId, response: error, datasourceId })
            );
        });
      });
    });

    describe('and query targets are live', () => {
      describe('and state equals Streaming', () => {
        it('then correct actions are dispatched', () => {
          const { exploreId, state, datasourceId } = mockExploreState();
          const unsubscribe = jest.fn();
          const serieA: any = {
            fields: [],
            rows: [],
            refId: 'A',
          };
          const serieB: any = {
            fields: [],
            rows: [],
            refId: 'B',
          };

          epicTester(runQueriesBatchEpic, state)
            .whenActionIsDispatched(
              runQueriesBatchAction({ exploreId, queryOptions: { live: true, interval: '', maxDataPoints: 1980 } })
            )
            .whenQueryObserverReceivesEvent({
              state: LoadingState.Streaming,
              delta: [serieA],
              key: 'some key',
              request: {} as DataQueryRequest,
              unsubscribe,
            })
            .whenQueryObserverReceivesEvent({
              state: LoadingState.Streaming,
              delta: [serieB],
              key: 'some key',
              request: {} as DataQueryRequest,
              unsubscribe,
            })
            .thenResultingActionsEqual(
              queryStartAction({ exploreId }),
              limitMessageRatePayloadAction({ exploreId, series: [serieA], datasourceId }),
              limitMessageRatePayloadAction({ exploreId, series: [serieB], datasourceId })
            );
        });
      });

      describe('and state equals Error', () => {
        it('then correct actions are dispatched', () => {
          const { exploreId, state, datasourceId } = mockExploreState();
          const unsubscribe = jest.fn();
          const error = { message: 'Something went really wrong!' };

          epicTester(runQueriesBatchEpic, state)
            .whenActionIsDispatched(
              runQueriesBatchAction({ exploreId, queryOptions: { live: true, interval: '', maxDataPoints: 1980 } })
            )
            .whenQueryObserverReceivesEvent({
              state: LoadingState.Error,
              error,
              key: 'some key',
              request: {} as DataQueryRequest,
              unsubscribe,
            })
            .thenResultingActionsEqual(
              queryStartAction({ exploreId }),
              processQueryErrorsAction({ exploreId, response: error, datasourceId })
            );
        });
      });

      describe('and state equals Done', () => {
        it('then correct actions are dispatched', () => {
          const { exploreId, state, datasourceId, history } = mockExploreState();
          const unsubscribe = jest.fn();
          const serieA: any = {
            fields: [],
            rows: [],
            refId: 'A',
          };
          const serieB: any = {
            fields: [],
            rows: [],
            refId: 'B',
          };
          const delta = [serieA, serieB];

          epicTester(runQueriesBatchEpic, state)
            .whenActionIsDispatched(
              runQueriesBatchAction({ exploreId, queryOptions: { live: true, interval: '', maxDataPoints: 1980 } })
            )
            .whenQueryObserverReceivesEvent({
              state: LoadingState.Done,
              data: null,
              delta,
              key: 'some key',
              request: {} as DataQueryRequest,
              unsubscribe,
            })
            .thenResultingActionsEqual(
              queryStartAction({ exploreId }),
              historyUpdatedAction({ exploreId, history }),
              processQueryResultsAction({
                exploreId,
                delta,
                series: null,
                latency: 0,
                datasourceId,
                loadingState: LoadingState.Done,
              }),
              stateSaveAction()
            );
        });
      });
    });

    describe('and another runQueriesBatchAction is dispatched', () => {
      it('then the observable should be unsubscribed', () => {
        const { response, series } = testContext();
        const { exploreId, state, history, datasourceId } = mockExploreState();
        const unsubscribe = jest.fn();

        epicTester(runQueriesBatchEpic, state)
          .whenActionIsDispatched(
            runQueriesBatchAction({ exploreId, queryOptions: { live: false, interval: '', maxDataPoints: 1980 } }) // first observable
          )
          .whenQueryReceivesResponse(response)
          .whenQueryObserverReceivesEvent({
            key: 'some key',
            request: {} as DataQueryRequest,
            state: LoadingState.Loading, // fake just to setup and test unsubscribe
            unsubscribe,
          })
          .whenActionIsDispatched(
            // second observable and unsubscribes the first observable
            runQueriesBatchAction({ exploreId, queryOptions: { live: true, interval: '', maxDataPoints: 800 } })
          )
          .whenQueryReceivesResponse(response)
          .whenQueryObserverReceivesEvent({
            key: 'some key',
            request: {} as DataQueryRequest,
            state: LoadingState.Loading, // fake just to setup and test unsubscribe
            unsubscribe,
          })
          .thenResultingActionsEqual(
            queryStartAction({ exploreId }), // output from first observable
            historyUpdatedAction({ exploreId, history }), // output from first observable
            processQueryResultsAction({
              exploreId,
              delta: null,
              series,
              latency: 0,
              datasourceId,
              loadingState: LoadingState.Done,
            }),
            stateSaveAction(),
            // output from first observable
            queryStartAction({ exploreId }), // output from second observable
            historyUpdatedAction({ exploreId, history }), // output from second observable
            processQueryResultsAction({
              exploreId,
              delta: null,
              series,
              latency: 0,
              datasourceId,
              loadingState: LoadingState.Done,
            }),
            stateSaveAction()
            // output from second observable
          );

        expect(unsubscribe).toBeCalledTimes(1); // first unsubscribe should be called but not second as that isn't unsubscribed
      });
    });

    describe('and resetExploreAction is dispatched', () => {
      it('then the observable should be unsubscribed', () => {
        const { response, series } = testContext();
        const { exploreId, state, history, datasourceId } = mockExploreState();
        const unsubscribe = jest.fn();

        epicTester(runQueriesBatchEpic, state)
          .whenActionIsDispatched(
            runQueriesBatchAction({ exploreId, queryOptions: { live: false, interval: '', maxDataPoints: 1980 } })
          )
          .whenQueryReceivesResponse(response)
          .whenQueryObserverReceivesEvent({
            key: 'some key',
            request: {} as DataQueryRequest,
            state: LoadingState.Loading, // fake just to setup and test unsubscribe
            unsubscribe,
          })
          .whenActionIsDispatched(resetExploreAction()) // unsubscribes the observable
          .whenQueryReceivesResponse(response) // new updates will not reach anywhere
          .thenResultingActionsEqual(
            queryStartAction({ exploreId }),
            historyUpdatedAction({ exploreId, history }),
            processQueryResultsAction({
              exploreId,
              delta: null,
              series,
              latency: 0,
              datasourceId,
              loadingState: LoadingState.Done,
            }),
            stateSaveAction()
          );

        expect(unsubscribe).toBeCalledTimes(1);
      });
    });

    describe('and updateDatasourceInstanceAction is dispatched', () => {
      it('then the observable should be unsubscribed', () => {
        const { response, series } = testContext();
        const { exploreId, state, history, datasourceId, datasourceInstance } = mockExploreState();
        const unsubscribe = jest.fn();

        epicTester(runQueriesBatchEpic, state)
          .whenActionIsDispatched(
            runQueriesBatchAction({ exploreId, queryOptions: { live: false, interval: '', maxDataPoints: 1980 } })
          )
          .whenQueryReceivesResponse(response)
          .whenQueryObserverReceivesEvent({
            key: 'some key',
            request: {} as DataQueryRequest,
            state: LoadingState.Loading, // fake just to setup and test unsubscribe
            unsubscribe,
          })
          .whenActionIsDispatched(updateDatasourceInstanceAction({ exploreId, datasourceInstance })) // unsubscribes the observable
          .whenQueryReceivesResponse(response) // new updates will not reach anywhere
          .thenResultingActionsEqual(
            queryStartAction({ exploreId }),
            historyUpdatedAction({ exploreId, history }),
            processQueryResultsAction({
              exploreId,
              delta: null,
              series,
              latency: 0,
              datasourceId,
              loadingState: LoadingState.Done,
            }),
            stateSaveAction()
          );

        expect(unsubscribe).toBeCalledTimes(1);
      });
    });

    describe('and changeRefreshIntervalAction is dispatched', () => {
      it('then the observable should be unsubscribed', () => {
        const { response, series } = testContext();
        const { exploreId, state, history, datasourceId } = mockExploreState();
        const unsubscribe = jest.fn();

        epicTester(runQueriesBatchEpic, state)
          .whenActionIsDispatched(
            runQueriesBatchAction({ exploreId, queryOptions: { live: false, interval: '', maxDataPoints: 1980 } })
          )
          .whenQueryReceivesResponse(response)
          .whenQueryObserverReceivesEvent({
            key: 'some key',
            request: {} as DataQueryRequest,
            state: LoadingState.Loading, // fake just to setup and test unsubscribe
            unsubscribe,
          })
          .whenActionIsDispatched(changeRefreshIntervalAction({ exploreId, refreshInterval: '' })) // unsubscribes the observable
          .whenQueryReceivesResponse(response) // new updates will not reach anywhere
          .thenResultingActionsEqual(
            queryStartAction({ exploreId }),
            historyUpdatedAction({ exploreId, history }),
            processQueryResultsAction({
              exploreId,
              delta: null,
              series,
              latency: 0,
              datasourceId,
              loadingState: LoadingState.Done,
            }),
            stateSaveAction()
          );

        expect(unsubscribe).toBeCalledTimes(1);
      });
    });

    describe('and clearQueriesAction is dispatched', () => {
      it('then the observable should be unsubscribed', () => {
        const { response, series } = testContext();
        const { exploreId, state, history, datasourceId } = mockExploreState();
        const unsubscribe = jest.fn();

        epicTester(runQueriesBatchEpic, state)
          .whenActionIsDispatched(
            runQueriesBatchAction({ exploreId, queryOptions: { live: false, interval: '', maxDataPoints: 1980 } })
          )
          .whenQueryReceivesResponse(response)
          .whenQueryObserverReceivesEvent({
            key: 'some key',
            request: {} as DataQueryRequest,
            state: LoadingState.Loading, // fake just to setup and test unsubscribe
            unsubscribe,
          })
          .whenActionIsDispatched(clearQueriesAction({ exploreId })) // unsubscribes the observable
          .whenQueryReceivesResponse(response) // new updates will not reach anywhere
          .thenResultingActionsEqual(
            queryStartAction({ exploreId }),
            historyUpdatedAction({ exploreId, history }),
            processQueryResultsAction({
              exploreId,
              delta: null,
              series,
              latency: 0,
              datasourceId,
              loadingState: LoadingState.Done,
            }),
            stateSaveAction()
          );

        expect(unsubscribe).toBeCalledTimes(1);
      });
    });
  });
});
@@ -1,231 +0,0 @@
import { Epic } from 'redux-observable';
import { Observable, Subject } from 'rxjs';
import { mergeMap, catchError, takeUntil, filter } from 'rxjs/operators';
import _, { isString } from 'lodash';
import { isLive } from '@grafana/ui/src/components/RefreshPicker/RefreshPicker';
import { DataStreamState, DataQueryResponse, DataQueryResponseData } from '@grafana/ui';

import { LoadingState, DataFrame, AbsoluteTimeRange } from '@grafana/data';
import { dateMath } from '@grafana/data';

import { ActionOf } from 'app/core/redux/actionCreatorFactory';
import { StoreState } from 'app/types/store';
import { buildQueryTransaction, updateHistory } from 'app/core/utils/explore';
import {
  clearQueriesAction,
  historyUpdatedAction,
  resetExploreAction,
  updateDatasourceInstanceAction,
  changeRefreshIntervalAction,
  processQueryErrorsAction,
  processQueryResultsAction,
  runQueriesBatchAction,
  RunQueriesBatchPayload,
  queryStartAction,
  limitMessageRatePayloadAction,
  stateSaveAction,
  changeRangeAction,
} from '../actionTypes';
import { ExploreId, ExploreItemState } from 'app/types';

const publishActions = (outerObservable: Subject<any>, actions: Array<ActionOf<any>>) => {
  for (const action of actions) {
    outerObservable.next(action);
  }
};

interface ProcessResponseConfig {
  exploreId: ExploreId;
  exploreItemState: ExploreItemState;
  datasourceId: string;
  now: number;
  loadingState: LoadingState;
  series?: DataQueryResponseData[];
  delta?: DataFrame[];
}

const processResponse = (config: ProcessResponseConfig) => {
  const { exploreId, exploreItemState, datasourceId, now, loadingState, series, delta } = config;
  const { queries, history } = exploreItemState;
  const latency = Date.now() - now;

  // Side-effect: Saving history in localstorage
  const nextHistory = updateHistory(history, datasourceId, queries);
  return [
    historyUpdatedAction({ exploreId, history: nextHistory }),
    processQueryResultsAction({ exploreId, latency, datasourceId, loadingState, series, delta }),
    stateSaveAction(),
  ];
};

interface ProcessErrorConfig {
  exploreId: ExploreId;
  datasourceId: string;
  error: any;
}

const processError = (config: ProcessErrorConfig) => {
  const { exploreId, datasourceId, error } = config;

  return [processQueryErrorsAction({ exploreId, response: error, datasourceId })];
};

export const runQueriesBatchEpic: Epic<ActionOf<any>, ActionOf<any>, StoreState> = (
  action$,
  state$,
  { getQueryResponse }
) => {
  return action$.ofType(runQueriesBatchAction.type).pipe(
    mergeMap((action: ActionOf<RunQueriesBatchPayload>) => {
      const { exploreId, queryOptions } = action.payload;
      const exploreItemState = state$.value.explore[exploreId];
      const { datasourceInstance, queries, queryIntervals, range, scanning } = exploreItemState;

      // Create an observable per run queries action
      // Within the observable create two subscriptions
      // First subscription: 'querySubscription' subscribes to the call to query method on datasourceinstance
      // Second subscription: 'streamSubscription' subscribes to events from the query methods observer callback
      const observable: Observable<ActionOf<any>> = Observable.create((outerObservable: Subject<any>) => {
        const datasourceId = datasourceInstance.meta.id;
        const transaction = buildQueryTransaction(queries, queryOptions, range, queryIntervals, scanning);
        outerObservable.next(queryStartAction({ exploreId }));

        const now = Date.now();
        let datasourceUnsubscribe: Function = null;
        const streamHandler = new Subject<DataStreamState>();
        const observer = (event: DataStreamState) => {
          datasourceUnsubscribe = event.unsubscribe;
          if (!streamHandler.closed) {
            // their might be a race condition when unsubscribing
            streamHandler.next(event);
          }
        };

        // observer subscription, handles datasourceInstance.query observer events and pushes that forward
        const streamSubscription = streamHandler.subscribe({
          next: event => {
            const { state, error, data, delta } = event;
            if (!data && !delta && !error) {
              return;
            }

            if (state === LoadingState.Error) {
              const actions = processError({ exploreId, datasourceId, error });
              publishActions(outerObservable, actions);
            }

            if (state === LoadingState.Streaming) {
              if (event.request && event.request.range) {
                let newRange = event.request.range;
                let absoluteRange: AbsoluteTimeRange = {
                  from: newRange.from.valueOf(),
                  to: newRange.to.valueOf(),
                };
                if (isString(newRange.raw.from)) {
                  newRange = {
                    from: dateMath.parse(newRange.raw.from, false),
                    to: dateMath.parse(newRange.raw.to, true),
                    raw: newRange.raw,
                  };
                  absoluteRange = {
                    from: newRange.from.valueOf(),
                    to: newRange.to.valueOf(),
                  };
                }
                outerObservable.next(changeRangeAction({ exploreId, range: newRange, absoluteRange }));
              }

              outerObservable.next(
                limitMessageRatePayloadAction({
                  exploreId,
                  series: delta,
                  datasourceId,
                })
              );
            }

            if (state === LoadingState.Done || state === LoadingState.Loading) {
              const actions = processResponse({
                exploreId,
                exploreItemState,
                datasourceId,
                now,
                loadingState: state,
                series: null,
                delta,
              });
              publishActions(outerObservable, actions);
            }
          },
        });

        // query subscription, handles datasourceInstance.query response and pushes that forward
        const querySubscription = getQueryResponse(datasourceInstance, transaction.options, observer)
          .pipe(
            mergeMap((response: DataQueryResponse) => {
              return processResponse({
                exploreId,
                exploreItemState,
                datasourceId,
                now,
                loadingState: LoadingState.Done,
                series: response && response.data ? response.data : [],
                delta: null,
              });
            }),
            catchError(error => {
              return processError({ exploreId, datasourceId, error });
            })
          )
          .subscribe({ next: (action: ActionOf<any>) => outerObservable.next(action) });

        // this unsubscribe method will be called when any of the takeUntil actions below happen
        const unsubscribe = () => {
          if (datasourceUnsubscribe) {
            datasourceUnsubscribe();
          }
          querySubscription.unsubscribe();
          streamSubscription.unsubscribe();
          streamHandler.unsubscribe();
          outerObservable.unsubscribe();
        };

        return unsubscribe;
      });

      return observable.pipe(
        takeUntil(
          action$
            .ofType(
              runQueriesBatchAction.type,
              resetExploreAction.type,
              updateDatasourceInstanceAction.type,
              changeRefreshIntervalAction.type,
              clearQueriesAction.type
            )
            .pipe(
              filter(action => {
                if (action.type === resetExploreAction.type) {
                  return true; // stops all subscriptions if user navigates away
                }

                if (action.type === updateDatasourceInstanceAction.type && action.payload.exploreId === exploreId) {
                  return true; // stops subscriptions if user changes data source
                }

                if (action.type === changeRefreshIntervalAction.type && action.payload.exploreId === exploreId) {
                  return !isLive(action.payload.refreshInterval); // stops subscriptions if user changes refresh interval away from 'Live'
                }

                if (action.type === clearQueriesAction.type && action.payload.exploreId === exploreId) {
                  return true; // stops subscriptions if user clears all queries
                }

                return action.payload.exploreId === exploreId;
              })
            )
        )
      );
    })
  );
};
@@ -1,71 +0,0 @@
import { mockExploreState } from 'test/mocks/mockExploreState';
import { epicTester } from 'test/core/redux/epicTester';
import { runQueriesAction, stateSaveAction, runQueriesBatchAction, clearQueriesAction } from '../actionTypes';
import { runQueriesEpic } from './runQueriesEpic';

describe('runQueriesEpic', () => {
  describe('when runQueriesAction is dispatched', () => {
    describe('and there is no datasourceError', () => {
      describe('and we have non empty queries', () => {
        describe('and explore is not live', () => {
          it('then runQueriesBatchAction and stateSaveAction are dispatched', () => {
            const queries = [{ refId: 'A', key: '123456', expr: '{__filename__="some.log"}' }];
            const { exploreId, state, datasourceInterval, containerWidth } = mockExploreState({ queries });

            epicTester(runQueriesEpic, state)
              .whenActionIsDispatched(runQueriesAction({ exploreId }))
              .thenResultingActionsEqual(
                runQueriesBatchAction({
                  exploreId,
                  queryOptions: { interval: datasourceInterval, maxDataPoints: containerWidth, live: false },
                })
              );
          });
        });

        describe('and explore is live', () => {
          it('then runQueriesBatchAction and stateSaveAction are dispatched', () => {
            const queries = [{ refId: 'A', key: '123456', expr: '{__filename__="some.log"}' }];
            const { exploreId, state, datasourceInterval, containerWidth } = mockExploreState({
              queries,
              isLive: true,
              streaming: true,
            });

            epicTester(runQueriesEpic, state)
              .whenActionIsDispatched(runQueriesAction({ exploreId }))
              .thenResultingActionsEqual(
                runQueriesBatchAction({
                  exploreId,
                  queryOptions: { interval: datasourceInterval, maxDataPoints: containerWidth, live: true },
                })
              );
          });
        });
      });

      describe('and we have no queries', () => {
        it('then clearQueriesAction and stateSaveAction are dispatched', () => {
          const queries: any[] = [];
          const { exploreId, state } = mockExploreState({ queries });

          epicTester(runQueriesEpic, state)
            .whenActionIsDispatched(runQueriesAction({ exploreId }))
            .thenResultingActionsEqual(clearQueriesAction({ exploreId }), stateSaveAction());
        });
      });
    });

    describe('and there is a datasourceError', () => {
      it('then no actions are dispatched', () => {
        const { exploreId, state } = mockExploreState({
          datasourceError: { message: 'Some error' },
        });

        epicTester(runQueriesEpic, state)
          .whenActionIsDispatched(runQueriesAction({ exploreId }))
          .thenNoActionsWhereDispatched();
      });
    });
  });
});
@@ -1,39 +0,0 @@
import { Epic } from 'redux-observable';
import { NEVER } from 'rxjs';
import { mergeMap } from 'rxjs/operators';

import { ActionOf } from 'app/core/redux/actionCreatorFactory';
import { StoreState } from 'app/types/store';
import { hasNonEmptyQuery } from 'app/core/utils/explore';
import {
  clearQueriesAction,
  runQueriesAction,
  RunQueriesPayload,
  runQueriesBatchAction,
  stateSaveAction,
} from '../actionTypes';

export const runQueriesEpic: Epic<ActionOf<any>, ActionOf<any>, StoreState> = (action$, state$) => {
  return action$.ofType(runQueriesAction.type).pipe(
    mergeMap((action: ActionOf<RunQueriesPayload>) => {
      const { exploreId } = action.payload;
      const { datasourceInstance, queries, datasourceError, containerWidth, isLive } = state$.value.explore[exploreId];

      if (datasourceError) {
        // let's not run any queries if data source is in a faulty state
        return NEVER;
      }

      if (!hasNonEmptyQuery(queries)) {
        return [clearQueriesAction({ exploreId }), stateSaveAction()]; // Remember to save to state and update location
      }

      // Some datasource's query builders allow per-query interval limits,
      // but we're using the datasource interval limit for now
      const interval = datasourceInstance.interval;
      const live = isLive;

      return [runQueriesBatchAction({ exploreId, queryOptions: { interval, maxDataPoints: containerWidth, live } })];
    })
  );
};
@@ -1,62 +0,0 @@
import { epicTester } from 'test/core/redux/epicTester';
import { stateSaveEpic } from './stateSaveEpic';
import { stateSaveAction, setUrlReplacedAction } from '../actionTypes';
import { updateLocation } from 'app/core/actions/location';
import { mockExploreState } from 'test/mocks/mockExploreState';

describe('stateSaveEpic', () => {
  describe('when stateSaveAction is dispatched', () => {
    describe('and there is a left state', () => {
      describe('and no split', () => {
        it('then the correct actions are dispatched', () => {
          const { exploreId, state } = mockExploreState();

          epicTester(stateSaveEpic, state)
            .whenActionIsDispatched(stateSaveAction())
            .thenResultingActionsEqual(
              updateLocation({
                query: { orgId: '1', left: '["now-6h","now","test",{"mode":null},{"ui":[true,true,true,null]}]' },
                replace: true,
              }),
              setUrlReplacedAction({ exploreId })
            );
        });
      });

      describe('and explore is split', () => {
        it('then the correct actions are dispatched', () => {
          const { exploreId, state } = mockExploreState({ split: true });

          epicTester(stateSaveEpic, state)
            .whenActionIsDispatched(stateSaveAction())
            .thenResultingActionsEqual(
              updateLocation({
                query: {
                  orgId: '1',
                  left: '["now-6h","now","test",{"mode":null},{"ui":[true,true,true,null]}]',
                  right: '["now-6h","now","test",{"mode":null},{"ui":[true,true,true,null]}]',
                },
                replace: true,
              }),
              setUrlReplacedAction({ exploreId })
            );
        });
      });
    });

    describe('and urlReplaced is true', () => {
      it('then setUrlReplacedAction should not be dispatched', () => {
        const { state } = mockExploreState({ urlReplaced: true });

        epicTester(stateSaveEpic, state)
          .whenActionIsDispatched(stateSaveAction())
          .thenResultingActionsEqual(
            updateLocation({
              query: { orgId: '1', left: '["now-6h","now","test",{"mode":null},{"ui":[true,true,true,null]}]' },
              replace: false,
            })
          );
      });
    });
  });
});
@@ -1,75 +0,0 @@
import { Epic } from 'redux-observable';
import { mergeMap } from 'rxjs/operators';
import { RawTimeRange, TimeRange } from '@grafana/data';
import { isDateTime } from '@grafana/data';

import { ActionOf } from 'app/core/redux/actionCreatorFactory';
import { StoreState } from 'app/types/store';
import { ExploreUrlState, ExploreId } from 'app/types/explore';
import { clearQueryKeys, serializeStateToUrlParam } from 'app/core/utils/explore';
import { updateLocation } from 'app/core/actions/location';
import { setUrlReplacedAction, stateSaveAction } from '../actionTypes';

const toRawTimeRange = (range: TimeRange): RawTimeRange => {
  let from = range.raw.from;
  if (isDateTime(from)) {
    from = from.valueOf().toString(10);
  }

  let to = range.raw.to;
  if (isDateTime(to)) {
    to = to.valueOf().toString(10);
  }

  return {
    from,
    to,
  };
};

export const stateSaveEpic: Epic<ActionOf<any>, ActionOf<any>, StoreState> = (action$, state$) => {
  return action$.ofType(stateSaveAction.type).pipe(
    mergeMap(() => {
      const { left, right, split } = state$.value.explore;
      const orgId = state$.value.user.orgId.toString();
      const replace = left && left.urlReplaced === false;
      const urlStates: { [index: string]: string } = { orgId };
      const leftUrlState: ExploreUrlState = {
        datasource: left.datasourceInstance.name,
        queries: left.queries.map(clearQueryKeys),
        range: toRawTimeRange(left.range),
        mode: left.mode,
        ui: {
          showingGraph: left.showingGraph,
          showingLogs: true,
          showingTable: left.showingTable,
          dedupStrategy: left.dedupStrategy,
        },
      };
      urlStates.left = serializeStateToUrlParam(leftUrlState, true);
      if (split) {
        const rightUrlState: ExploreUrlState = {
          datasource: right.datasourceInstance.name,
          queries: right.queries.map(clearQueryKeys),
          range: toRawTimeRange(right.range),
          mode: right.mode,
          ui: {
            showingGraph: right.showingGraph,
            showingLogs: true,
            showingTable: right.showingTable,
            dedupStrategy: right.dedupStrategy,
          },
        };

        urlStates.right = serializeStateToUrlParam(rightUrlState, true);
      }

      const actions: Array<ActionOf<any>> = [updateLocation({ query: urlStates, replace })];
      if (replace) {
        actions.push(setUrlReplacedAction({ exploreId: ExploreId.left }));
      }

      return actions;
    })
  );
};
@@ -1,105 +0,0 @@
import { dateTime, DefaultTimeZone } from '@grafana/data';

import { epicTester } from 'test/core/redux/epicTester';
import { mockExploreState } from 'test/mocks/mockExploreState';
import { timeEpic } from './timeEpic';
import { updateTimeRangeAction, changeRangeAction } from '../actionTypes';
import { EpicDependencies } from 'app/store/configureStore';

const from = dateTime('2019-01-01 10:00:00.000Z');
const to = dateTime('2019-01-01 16:00:00.000Z');
const rawFrom = 'now-6h';
const rawTo = 'now';
const rangeMock = {
  from,
  to,
  raw: {
    from: rawFrom,
    to: rawTo,
  },
};

describe('timeEpic', () => {
  describe('when updateTimeRangeAction is dispatched', () => {
    describe('and no rawRange is supplied', () => {
      describe('and no absoluteRange is supplied', () => {
        it('then the correct actions are dispatched', () => {
          const { exploreId, state, range } = mockExploreState({ range: rangeMock });
          const absoluteRange = { from: range.from.valueOf(), to: range.to.valueOf() };
          const stateToTest = { ...state, user: { timeZone: 'browser', orgId: -1 } };
          const getTimeRange = jest.fn().mockReturnValue(rangeMock);
          const dependencies: Partial<EpicDependencies> = {
            getTimeRange,
          };

          epicTester(timeEpic, stateToTest, dependencies)
            .whenActionIsDispatched(updateTimeRangeAction({ exploreId }))
            .thenDependencyWasCalledTimes(1, 'getTimeSrv', 'init')
            .thenDependencyWasCalledTimes(1, 'getTimeRange')
            .thenDependencyWasCalledWith([DefaultTimeZone, rangeMock.raw], 'getTimeRange')
            .thenResultingActionsEqual(
              changeRangeAction({
                exploreId,
                range,
                absoluteRange,
              })
            );
        });
      });

      describe('and absoluteRange is supplied', () => {
        it('then the correct actions are dispatched', () => {
          const { exploreId, state, range } = mockExploreState({ range: rangeMock });
          const absoluteRange = { from: range.from.valueOf(), to: range.to.valueOf() };
          const stateToTest = { ...state, user: { timeZone: 'browser', orgId: -1 } };
          const getTimeRange = jest.fn().mockReturnValue(rangeMock);
          const dependencies: Partial<EpicDependencies> = {
            getTimeRange,
          };

          epicTester(timeEpic, stateToTest, dependencies)
            .whenActionIsDispatched(updateTimeRangeAction({ exploreId, absoluteRange }))
            .thenDependencyWasCalledTimes(1, 'getTimeSrv', 'init')
            .thenDependencyWasCalledTimes(1, 'getTimeRange')
            .thenDependencyWasCalledWith([DefaultTimeZone, { from: null, to: null }], 'getTimeRange')
            .thenDependencyWasCalledTimes(2, 'dateTimeForTimeZone')
            .thenResultingActionsEqual(
              changeRangeAction({
                exploreId,
                range,
                absoluteRange,
              })
            );
        });
      });
    });

    describe('and rawRange is supplied', () => {
      describe('and no absoluteRange is supplied', () => {
        it('then the correct actions are dispatched', () => {
          const { exploreId, state, range } = mockExploreState({ range: rangeMock });
          const rawRange = { from: 'now-5m', to: 'now' };
          const absoluteRange = { from: range.from.valueOf(), to: range.to.valueOf() };
          const stateToTest = { ...state, user: { timeZone: 'browser', orgId: -1 } };
          const getTimeRange = jest.fn().mockReturnValue(rangeMock);
          const dependencies: Partial<EpicDependencies> = {
            getTimeRange,
          };

          epicTester(timeEpic, stateToTest, dependencies)
            .whenActionIsDispatched(updateTimeRangeAction({ exploreId, rawRange }))
            .thenDependencyWasCalledTimes(1, 'getTimeSrv', 'init')
            .thenDependencyWasCalledTimes(1, 'getTimeRange')
            .thenDependencyWasCalledWith([DefaultTimeZone, rawRange], 'getTimeRange')
            .thenResultingActionsEqual(
              changeRangeAction({
                exploreId,
                range,
                absoluteRange,
              })
            );
        });
      });
    });
  });
});
@@ -1,47 +0,0 @@
import { Epic } from 'redux-observable';
import { map } from 'rxjs/operators';
import { AbsoluteTimeRange, RawTimeRange } from '@grafana/data';

import { ActionOf } from 'app/core/redux/actionCreatorFactory';
import { StoreState } from 'app/types/store';
import { updateTimeRangeAction, UpdateTimeRangePayload, changeRangeAction } from '../actionTypes';
import { EpicDependencies } from 'app/store/configureStore';

export const timeEpic: Epic<ActionOf<any>, ActionOf<any>, StoreState, EpicDependencies> = (
  action$,
  state$,
  { getTimeSrv, getTimeRange, getTimeZone, dateTimeForTimeZone }
) => {
  return action$.ofType(updateTimeRangeAction.type).pipe(
    map((action: ActionOf<UpdateTimeRangePayload>) => {
      const { exploreId, absoluteRange: absRange, rawRange: actionRange } = action.payload;
      const itemState = state$.value.explore[exploreId];
      const timeZone = getTimeZone(state$.value.user);
      const { range: rangeInState } = itemState;
      let rawRange: RawTimeRange = rangeInState.raw;

      if (absRange) {
        rawRange = {
          from: dateTimeForTimeZone(timeZone, absRange.from),
          to: dateTimeForTimeZone(timeZone, absRange.to),
        };
      }

      if (actionRange) {
        rawRange = actionRange;
      }

      const range = getTimeRange(timeZone, rawRange);
      const absoluteRange: AbsoluteTimeRange = { from: range.from.valueOf(), to: range.to.valueOf() };

      getTimeSrv().init({
        time: range.raw,
        refresh: false,
        getTimezone: () => timeZone,
        timeRangeUpdated: (): any => undefined,
      });

      return changeRangeAction({ exploreId, range, absoluteRange });
    })
  );
};
@@ -1,149 +0,0 @@
import { Epic, ActionsObservable, StateObservable } from 'redux-observable';
import { Subject } from 'rxjs';
import {
  DataSourceApi,
  DataQuery,
  DataSourceJsonData,
  DataQueryRequest,
  DataStreamObserver,
  DataQueryResponse,
  DataStreamState,
} from '@grafana/ui';
import { DefaultTimeZone } from '@grafana/data';

import { ActionOf } from 'app/core/redux/actionCreatorFactory';
import { StoreState } from 'app/types/store';
import { EpicDependencies } from 'app/store/configureStore';
import { TimeSrv } from 'app/features/dashboard/services/TimeSrv';
import { DEFAULT_RANGE } from 'app/core/utils/explore';

export const MOCKED_ABSOLUTE_RANGE = { from: 1, to: 2 };

export const epicTester = (
  epic: Epic<ActionOf<any>, ActionOf<any>, StoreState, EpicDependencies>,
  state?: Partial<StoreState>,
  dependencies?: Partial<EpicDependencies>
) => {
  const resultingActions: Array<ActionOf<any>> = [];
  const action$ = new Subject<ActionOf<any>>();
  const state$ = new Subject<StoreState>();
  const actionObservable$ = new ActionsObservable(action$);
  const stateObservable$ = new StateObservable(state$, (state as StoreState) || ({} as StoreState));
  const queryResponse$ = new Subject<DataQueryResponse>();
  const observer$ = new Subject<DataStreamState>();
  const getQueryResponse = (
    datasourceInstance: DataSourceApi<DataQuery, DataSourceJsonData>,
    options: DataQueryRequest<DataQuery>,
    observer?: DataStreamObserver
  ) => {
    if (observer) {
      observer$.subscribe({ next: event => observer(event) });
    }
    return queryResponse$;
  };
  const init = jest.fn();
  const getTimeSrv = (): TimeSrv => {
    const timeSrvMock: TimeSrv = {} as TimeSrv;

    return Object.assign(timeSrvMock, { init });
  };

  const getTimeRange = jest.fn().mockReturnValue(DEFAULT_RANGE);

  const getShiftedTimeRange = jest.fn().mockReturnValue(MOCKED_ABSOLUTE_RANGE);

  const getTimeZone = jest.fn().mockReturnValue(DefaultTimeZone);

  const dateTimeForTimeZone = jest.fn().mockReturnValue(null);

  const defaultDependencies: EpicDependencies = {
    getQueryResponse,
    getTimeSrv,
    getTimeRange,
    getTimeZone,
    getShiftedTimeRange,
    dateTimeForTimeZone,
  };

  const theDependencies: EpicDependencies = { ...defaultDependencies, ...dependencies };

  epic(actionObservable$, stateObservable$, theDependencies).subscribe({
    next: action => resultingActions.push(action),
  });

  const whenActionIsDispatched = (action: ActionOf<any>) => {
    action$.next(action);

    return instance;
  };

  const whenQueryReceivesResponse = (response: DataQueryResponse) => {
    queryResponse$.next(response);

    return instance;
  };

  const whenQueryThrowsError = (error: any) => {
    queryResponse$.error(error);

    return instance;
  };

  const whenQueryObserverReceivesEvent = (event: DataStreamState) => {
    observer$.next(event);

    return instance;
  };

  const thenResultingActionsEqual = (...actions: Array<ActionOf<any>>) => {
    expect(actions).toEqual(resultingActions);

    return instance;
  };

  const thenNoActionsWhereDispatched = () => {
    expect(resultingActions).toEqual([]);

    return instance;
  };

  const getDependencyMock = (dependency: string, method?: string) => {
    // @ts-ignore
    const dep = theDependencies[dependency];
    let mock = null;
    if (dep instanceof Function) {
      mock = method ? dep()[method] : dep();
    } else {
      mock = method ? dep[method] : dep;
    }

    return mock;
  };

  const thenDependencyWasCalledTimes = (times: number, dependency: string, method?: string) => {
    const mock = getDependencyMock(dependency, method);
    expect(mock).toBeCalledTimes(times);

    return instance;
  };

  const thenDependencyWasCalledWith = (args: any[], dependency: string, method?: string) => {
    const mock = getDependencyMock(dependency, method);
    expect(mock).toBeCalledWith(...args);

    return instance;
  };

  const instance = {
    whenActionIsDispatched,
    whenQueryReceivesResponse,
    whenQueryThrowsError,
    whenQueryObserverReceivesEvent,
    thenResultingActionsEqual,
    thenNoActionsWhereDispatched,
    thenDependencyWasCalledTimes,
    thenDependencyWasCalledWith,
  };

  return instance;
};