mirror of https://github.com/grafana/grafana
Prometheus: Remove query assistant and related components (#100669)
* remove query assistant related components * remove export statement * remove grafana/llm from prometheus packages * remove extra package * revert unintended change * incorrect handling of managedPluginsInstall merge deletion * update yarn.lock * linting fix * linting fix (branch: eleijonmarck/datasource-permissions/query-only-for-query-path)
parent
f0f8bb890c
commit
6eca5c09df
@ -1,148 +0,0 @@ |
||||
// Core Grafana history https://github.com/grafana/grafana/blob/v11.0.0-preview/public/app/plugins/datasource/prometheus/querybuilder/components/promQail/PromQail.test.tsx
|
||||
import { render, screen, waitFor } from '@testing-library/react'; |
||||
import userEvent from '@testing-library/user-event'; |
||||
|
||||
import { DataSourceInstanceSettings, DataSourcePluginMeta } from '@grafana/data'; |
||||
|
||||
import { PrometheusDatasource } from '../../../datasource'; |
||||
import PromQlLanguageProvider from '../../../language_provider'; |
||||
import { EmptyLanguageProviderMock } from '../../../language_provider.mock'; |
||||
import { PromOptions } from '../../../types'; |
||||
import { PromVisualQuery } from '../../types'; |
||||
|
||||
import { PromQail, queryAssistanttestIds } from './PromQail'; |
||||
|
||||
// don't care about interaction tracking in our unit tests
|
||||
jest.mock('@grafana/runtime', () => ({ |
||||
...jest.requireActual('@grafana/runtime'), |
||||
reportInteraction: jest.fn(), |
||||
})); |
||||
|
||||
window.HTMLElement.prototype.scrollIntoView = jest.fn(); |
||||
|
||||
describe('PromQail', () => { |
||||
it('renders the drawer', async () => { |
||||
setup(defaultQuery); |
||||
await waitFor(() => { |
||||
expect(screen.getByText('Query advisor')).toBeInTheDocument(); |
||||
}); |
||||
}); |
||||
|
||||
it('shows an option to not show security warning', async () => { |
||||
setup(defaultQuery); |
||||
await waitFor(() => { |
||||
expect(screen.getByText("Don't show this message again")).toBeInTheDocument(); |
||||
}); |
||||
}); |
||||
|
||||
it('shows selected metric and asks for a prompt', async () => { |
||||
setup(defaultQuery); |
||||
|
||||
await clickSecurityButton(); |
||||
|
||||
await waitFor(() => { |
||||
expect(screen.getByText('random_metric')).toBeInTheDocument(); |
||||
expect(screen.getByText('Do you know what you want to query?')).toBeInTheDocument(); |
||||
}); |
||||
}); |
||||
|
||||
it('displays a prompt when the user knows what they want to query', async () => { |
||||
setup(defaultQuery); |
||||
|
||||
await clickSecurityButton(); |
||||
|
||||
await waitFor(() => { |
||||
expect(screen.getByText('random_metric')).toBeInTheDocument(); |
||||
expect(screen.getByText('Do you know what you want to query?')).toBeInTheDocument(); |
||||
}); |
||||
|
||||
const aiPrompt = screen.getByTestId(queryAssistanttestIds.clickForAi); |
||||
|
||||
await userEvent.click(aiPrompt); |
||||
|
||||
await waitFor(() => { |
||||
expect(screen.getByText('What kind of data do you want to see with your metric?')).toBeInTheDocument(); |
||||
}); |
||||
}); |
||||
|
||||
it('does not display a prompt when choosing historical', async () => { |
||||
setup(defaultQuery); |
||||
|
||||
await clickSecurityButton(); |
||||
|
||||
await waitFor(() => { |
||||
expect(screen.getByText('random_metric')).toBeInTheDocument(); |
||||
expect(screen.getByText('Do you know what you want to query?')).toBeInTheDocument(); |
||||
}); |
||||
|
||||
const historicalPrompt = screen.getByTestId(queryAssistanttestIds.clickForHistorical); |
||||
|
||||
await userEvent.click(historicalPrompt); |
||||
|
||||
await waitFor(() => { |
||||
expect(screen.queryByText('What kind of data do you want to see with your metric?')).toBeNull(); |
||||
}); |
||||
}); |
||||
}); |
||||
|
||||
const defaultQuery: PromVisualQuery = { |
||||
metric: 'random_metric', |
||||
labels: [], |
||||
operations: [], |
||||
}; |
||||
|
||||
function createDatasource(withLabels?: boolean) { |
||||
const languageProvider = new EmptyLanguageProviderMock() as unknown as PromQlLanguageProvider; |
||||
|
||||
languageProvider.metricsMetadata = { |
||||
'all-metrics': { |
||||
type: 'all-metrics-type', |
||||
help: 'all-metrics-help', |
||||
}, |
||||
a: { |
||||
type: 'counter', |
||||
help: 'a-metric-help', |
||||
}, |
||||
a_bucket: { |
||||
type: 'counter', |
||||
help: 'for functions', |
||||
}, |
||||
}; |
||||
|
||||
const datasource = new PrometheusDatasource( |
||||
{ |
||||
url: '', |
||||
jsonData: {}, |
||||
meta: {} as DataSourcePluginMeta, |
||||
} as DataSourceInstanceSettings<PromOptions>, |
||||
undefined, |
||||
languageProvider |
||||
); |
||||
return datasource; |
||||
} |
||||
|
||||
function createProps(query: PromVisualQuery, datasource: PrometheusDatasource) { |
||||
return { |
||||
datasource, |
||||
onChange: jest.fn(), |
||||
closeDrawer: jest.fn(), |
||||
query: query, |
||||
}; |
||||
} |
||||
|
||||
function setup(query: PromVisualQuery) { |
||||
const withLabels: boolean = query.labels.length > 0; |
||||
const datasource = createDatasource(withLabels); |
||||
const props = createProps(query, datasource); |
||||
|
||||
// render the drawer only
|
||||
const { container } = render(<PromQail {...props} />); |
||||
|
||||
return container; |
||||
} |
||||
|
||||
async function clickSecurityButton() { |
||||
const securityInfoButton = screen.getByTestId(queryAssistanttestIds.securityInfoButton); |
||||
|
||||
await userEvent.click(securityInfoButton); |
||||
} |
@ -1,616 +0,0 @@ |
||||
// Core Grafana history https://github.com/grafana/grafana/blob/v11.0.0-preview/public/app/plugins/datasource/prometheus/querybuilder/components/promQail/PromQail.tsx
|
||||
import { css, cx } from '@emotion/css'; |
||||
import { PayloadAction, createSlice } from '@reduxjs/toolkit'; |
||||
import { useEffect, useReducer, useRef, useState } from 'react'; |
||||
|
||||
import { GrafanaTheme2, store } from '@grafana/data'; |
||||
import { reportInteraction } from '@grafana/runtime'; |
||||
import { Alert, Button, Checkbox, Input, Spinner, useTheme2 } from '@grafana/ui'; |
||||
|
||||
import { PrometheusDatasource } from '../../../datasource'; |
||||
import { PromVisualQuery } from '../../types'; |
||||
|
||||
import { QuerySuggestionContainer } from './QuerySuggestionContainer'; |
||||
// @ts-ignore until we can get these added for icons
|
||||
import AI_Logo_color from './resources/AI_Logo_color.svg'; |
||||
import { promQailExplain, promQailSuggest } from './state/helpers'; |
||||
import { createInteraction, initialState } from './state/state'; |
||||
import { Interaction, SuggestionType } from './types'; |
||||
|
||||
export type PromQailProps = { |
||||
query: PromVisualQuery; |
||||
closeDrawer: () => void; |
||||
onChange: (query: PromVisualQuery) => void; |
||||
datasource: PrometheusDatasource; |
||||
}; |
||||
|
||||
const SKIP_STARTING_MESSAGE = 'SKIP_STARTING_MESSAGE'; |
||||
|
||||
export const PromQail = (props: PromQailProps) => { |
||||
const { query, closeDrawer, onChange, datasource } = props; |
||||
const skipStartingMessage = store.getBool(SKIP_STARTING_MESSAGE, false); |
||||
|
||||
const [state, dispatch] = useReducer(stateSlice.reducer, initialState(query, !skipStartingMessage)); |
||||
|
||||
const [labelNames, setLabelNames] = useState<string[]>([]); |
||||
|
||||
const suggestions = state.interactions.reduce((acc, int) => acc + int.suggestions.length, 0); |
||||
|
||||
const responsesEndRef = useRef(null); |
||||
|
||||
const scrollToBottom = () => { |
||||
if (responsesEndRef) { |
||||
// @ts-ignore for React.MutableRefObject
|
||||
responsesEndRef?.current?.scrollIntoView({ behavior: 'smooth' }); |
||||
} |
||||
}; |
||||
|
||||
useEffect(() => { |
||||
// only scroll when an interaction has been added or the suggestions have been updated
|
||||
scrollToBottom(); |
||||
}, [state.interactions.length, suggestions]); |
||||
|
||||
useEffect(() => { |
||||
const fetchLabels = async () => { |
||||
let labelsIndex: Record<string, string[]> = await datasource.languageProvider.fetchLabelsWithMatch(query.metric); |
||||
setLabelNames(Object.keys(labelsIndex)); |
||||
}; |
||||
fetchLabels(); |
||||
}, [query, datasource]); |
||||
|
||||
const theme = useTheme2(); |
||||
const styles = getStyles(theme); |
||||
|
||||
return ( |
||||
<div className={styles.containerPadding}> |
||||
{/* Query Advisor */} |
||||
{/* header */} |
||||
<div className={styles.header}> |
||||
<h3>Query advisor</h3> |
||||
<Button icon="times" fill="text" variant="secondary" onClick={closeDrawer} /> |
||||
</div> |
||||
{/* Starting message */} |
||||
<div> |
||||
<div className={styles.iconSection}> |
||||
<img src={AI_Logo_color} alt="AI logo color" /> Assistant |
||||
</div> |
||||
{state.showStartingMessage ? ( |
||||
<> |
||||
<div className={styles.dataList}> |
||||
<ol> |
||||
<li className={styles.textPadding}> |
||||
Query Advisor suggests queries based on a metric and requests you type in. |
||||
</li> |
||||
<li className={styles.textPadding}> |
||||
Query Advisor sends Prometheus metrics, labels and metadata to the LLM provider you've configured. |
||||
Be sure to align its usage with your company's internal policies. |
||||
</li> |
||||
<li className={styles.textPadding}> |
||||
An AI-suggested query may not fully answer your question. Always take a moment to understand a query |
||||
before you use it. |
||||
</li> |
||||
</ol> |
||||
</div> |
||||
<Alert |
||||
title={''} |
||||
severity={'info'} |
||||
key={'promqail-llm-app'} |
||||
className={cx(styles.textPadding, styles.noMargin)} |
||||
> |
||||
Query Advisor is currently in Private Preview. Feedback is appreciated and can be provided on explanations |
||||
and suggestions. |
||||
</Alert> |
||||
|
||||
{/* don't show this message again, store in localstorage */} |
||||
<div className={styles.textPadding}> |
||||
<Checkbox |
||||
checked={state.indicateCheckbox} |
||||
value={state.indicateCheckbox} |
||||
onChange={() => { |
||||
const val = store.getBool(SKIP_STARTING_MESSAGE, false); |
||||
store.set(SKIP_STARTING_MESSAGE, !val); |
||||
dispatch(indicateCheckbox(!val)); |
||||
}} |
||||
label="Don't show this message again" |
||||
/> |
||||
</div> |
||||
<div className={styles.rightButtonsWrapper}> |
||||
<div className={styles.rightButtons}> |
||||
<Button className={styles.leftButton} fill="outline" variant="secondary" onClick={closeDrawer}> |
||||
Cancel |
||||
</Button> |
||||
<Button |
||||
fill="solid" |
||||
variant="primary" |
||||
onClick={() => dispatch(showStartingMessage(false))} |
||||
data-testid={queryAssistanttestIds.securityInfoButton} |
||||
> |
||||
Continue |
||||
</Button> |
||||
</div> |
||||
</div> |
||||
</> |
||||
) : ( |
||||
<div className={styles.bodySmall}> |
||||
{/* MAKE THIS TABLE RESPONSIVE */} |
||||
{/* FIT SUPER LONG METRICS AND LABELS IN HERE */} |
||||
<div className={styles.textPadding}>Here is the metric you have selected:</div> |
||||
<div className={styles.infoContainerWrapper}> |
||||
<div className={styles.infoContainer}> |
||||
<table className={styles.metricTable}> |
||||
<tbody> |
||||
<tr> |
||||
<td className={styles.metricTableName}>metric</td> |
||||
<td className={styles.metricTableValue}>{state.query.metric}</td> |
||||
<td> |
||||
<Button |
||||
fill="outline" |
||||
variant="secondary" |
||||
onClick={closeDrawer} |
||||
className={styles.metricTableButton} |
||||
size={'sm'} |
||||
> |
||||
Choose new metric |
||||
</Button> |
||||
</td> |
||||
</tr> |
||||
{state.query.labels.map((label, idx) => { |
||||
const text = idx === 0 ? 'labels' : ''; |
||||
return ( |
||||
<tr key={`${label.label}-${idx}`}> |
||||
<td>{text}</td> |
||||
<td className={styles.metricTableValue}>{`${label.label}${label.op}${label.value}`}</td> |
||||
<td> </td> |
||||
</tr> |
||||
); |
||||
})} |
||||
</tbody> |
||||
</table> |
||||
</div> |
||||
</div> |
||||
|
||||
{/* Ask if you know what you want to query? */} |
||||
{!state.askForQueryHelp && state.interactions.length === 0 && ( |
||||
<> |
||||
<div className={styles.queryQuestion}>Do you know what you want to query?</div> |
||||
<div className={styles.rightButtonsWrapper}> |
||||
<div className={styles.rightButtons}> |
||||
<Button |
||||
className={styles.leftButton} |
||||
fill="solid" |
||||
variant="secondary" |
||||
data-testid={queryAssistanttestIds.clickForHistorical} |
||||
onClick={() => { |
||||
const isLoading = true; |
||||
const suggestionType = SuggestionType.Historical; |
||||
dispatch(addInteraction({ suggestionType, isLoading })); |
||||
reportInteraction('grafana_prometheus_promqail_know_what_you_want_to_query', { |
||||
promVisualQuery: query, |
||||
doYouKnow: 'no', |
||||
}); |
||||
promQailSuggest(dispatch, 0, query, labelNames, datasource); |
||||
}} |
||||
> |
||||
No |
||||
</Button> |
||||
<Button |
||||
fill="solid" |
||||
variant="primary" |
||||
data-testid={queryAssistanttestIds.clickForAi} |
||||
onClick={() => { |
||||
reportInteraction('grafana_prometheus_promqail_know_what_you_want_to_query', { |
||||
promVisualQuery: query, |
||||
doYouKnow: 'yes', |
||||
}); |
||||
const isLoading = false; |
||||
const suggestionType = SuggestionType.AI; |
||||
dispatch(addInteraction({ suggestionType, isLoading })); |
||||
}} |
||||
> |
||||
Yes |
||||
</Button> |
||||
</div> |
||||
</div> |
||||
</> |
||||
)} |
||||
|
||||
{state.interactions.map((interaction: Interaction, idx: number) => { |
||||
return ( |
||||
<div key={idx}> |
||||
{interaction.suggestionType === SuggestionType.AI ? ( |
||||
<> |
||||
<div className={styles.textPadding}>What kind of data do you want to see with your metric?</div> |
||||
<div className={cx(styles.secondaryText, styles.bottomMargin)}> |
||||
<div>You do not need to enter in a metric or a label again in the prompt.</div> |
||||
<div>Example: I want to monitor request latency, not errors.</div> |
||||
</div> |
||||
<div className={styles.inputPadding}> |
||||
<Input |
||||
value={interaction.prompt} |
||||
spellCheck={false} |
||||
placeholder="Enter prompt" |
||||
disabled={interaction.suggestions.length > 0} |
||||
onChange={(e) => { |
||||
const prompt = e.currentTarget.value; |
||||
|
||||
const payload = { |
||||
idx: idx, |
||||
interaction: { ...interaction, prompt }, |
||||
}; |
||||
|
||||
dispatch(updateInteraction(payload)); |
||||
}} |
||||
/> |
||||
</div> |
||||
{interaction.suggestions.length === 0 ? ( |
||||
interaction.isLoading ? ( |
||||
<> |
||||
<div className={styles.loadingMessageContainer}> |
||||
Waiting for OpenAI <Spinner className={styles.floatRight} /> |
||||
</div> |
||||
</> |
||||
) : ( |
||||
<> |
||||
<div className={styles.rightButtonsWrapper}> |
||||
<div className={styles.rightButtons}> |
||||
<Button |
||||
className={styles.leftButton} |
||||
fill="outline" |
||||
variant="secondary" |
||||
onClick={closeDrawer} |
||||
> |
||||
Cancel |
||||
</Button> |
||||
<Button |
||||
className={styles.leftButton} |
||||
fill="outline" |
||||
variant="secondary" |
||||
onClick={() => { |
||||
// JUST SUGGEST QUERIES AND SHOW THE LIST
|
||||
const newInteraction: Interaction = { |
||||
...interaction, |
||||
suggestionType: SuggestionType.Historical, |
||||
isLoading: true, |
||||
}; |
||||
|
||||
const payload = { |
||||
idx: idx, |
||||
interaction: newInteraction, |
||||
}; |
||||
|
||||
reportInteraction('grafana_prometheus_promqail_suggest_query_instead', { |
||||
promVisualQuery: query, |
||||
}); |
||||
|
||||
dispatch(updateInteraction(payload)); |
||||
promQailSuggest(dispatch, idx, query, labelNames, datasource, newInteraction); |
||||
}} |
||||
> |
||||
Suggest queries instead |
||||
</Button> |
||||
<Button |
||||
fill="solid" |
||||
variant="primary" |
||||
data-testid={queryAssistanttestIds.submitPrompt + idx} |
||||
onClick={() => { |
||||
const newInteraction: Interaction = { |
||||
...interaction, |
||||
isLoading: true, |
||||
}; |
||||
|
||||
const payload = { |
||||
idx: idx, |
||||
interaction: newInteraction, |
||||
}; |
||||
|
||||
reportInteraction('grafana_prometheus_promqail_prompt_submitted', { |
||||
promVisualQuery: query, |
||||
prompt: interaction.prompt, |
||||
}); |
||||
|
||||
dispatch(updateInteraction(payload)); |
||||
// add the suggestions in the API call
|
||||
promQailSuggest(dispatch, idx, query, labelNames, datasource, interaction); |
||||
}} |
||||
> |
||||
Submit |
||||
</Button> |
||||
</div> |
||||
</div> |
||||
</> |
||||
) |
||||
) : ( |
||||
// LIST OF SUGGESTED QUERIES FROM AI
|
||||
<QuerySuggestionContainer |
||||
suggestionType={SuggestionType.AI} |
||||
querySuggestions={interaction.suggestions} |
||||
closeDrawer={closeDrawer} |
||||
nextInteraction={() => { |
||||
const isLoading = false; |
||||
const suggestionType = SuggestionType.AI; |
||||
dispatch(addInteraction({ suggestionType, isLoading })); |
||||
}} |
||||
queryExplain={(suggIdx: number) => |
||||
interaction.suggestions[suggIdx].explanation === '' |
||||
? promQailExplain(dispatch, idx, query, interaction, suggIdx, datasource) |
||||
: interaction.suggestions[suggIdx].explanation |
||||
} |
||||
onChange={onChange} |
||||
prompt={interaction.prompt ?? ''} |
||||
/> |
||||
)} |
||||
</> |
||||
) : // HISTORICAL SUGGESTIONS
|
||||
interaction.isLoading ? ( |
||||
<> |
||||
<div className={styles.loadingMessageContainer}> |
||||
Waiting for OpenAI <Spinner className={styles.floatRight} /> |
||||
</div> |
||||
</> |
||||
) : ( |
||||
// LIST OF SUGGESTED QUERIES FROM HISTORICAL DATA
|
||||
<QuerySuggestionContainer |
||||
suggestionType={SuggestionType.Historical} |
||||
querySuggestions={interaction.suggestions} |
||||
closeDrawer={closeDrawer} |
||||
nextInteraction={() => { |
||||
const isLoading = false; |
||||
const suggestionType = SuggestionType.AI; |
||||
dispatch(addInteraction({ suggestionType, isLoading })); |
||||
}} |
||||
queryExplain={(suggIdx: number) => |
||||
interaction.suggestions[suggIdx].explanation === '' |
||||
? promQailExplain(dispatch, idx, query, interaction, suggIdx, datasource) |
||||
: interaction.suggestions[suggIdx].explanation |
||||
} |
||||
onChange={onChange} |
||||
prompt={interaction.prompt ?? ''} |
||||
/> |
||||
)} |
||||
</div> |
||||
); |
||||
})} |
||||
</div> |
||||
)} |
||||
</div> |
||||
<div ref={responsesEndRef} /> |
||||
</div> |
||||
); |
||||
}; |
||||
|
||||
export const getStyles = (theme: GrafanaTheme2) => { |
||||
return { |
||||
sectionPadding: css({ |
||||
padding: '20px', |
||||
}), |
||||
header: css({ |
||||
display: 'flex', |
||||
|
||||
button: { |
||||
marginLeft: 'auto', |
||||
}, |
||||
}), |
||||
iconSection: css({ |
||||
padding: '0 0 10px 0', |
||||
color: `${theme.colors.text.secondary}`, |
||||
|
||||
img: { |
||||
paddingRight: '4px', |
||||
}, |
||||
}), |
||||
rightButtonsWrapper: css({ |
||||
display: 'flex', |
||||
}), |
||||
rightButtons: css({ |
||||
marginLeft: 'auto', |
||||
}), |
||||
leftButton: css({ |
||||
marginRight: '10px', |
||||
}), |
||||
dataList: css({ |
||||
padding: '0px 28px 0px 28px', |
||||
}), |
||||
textPadding: css({ |
||||
paddingBottom: '12px', |
||||
}), |
||||
containerPadding: css({ |
||||
padding: '28px', |
||||
}), |
||||
infoContainer: css({ |
||||
border: `${theme.colors.border.strong}`, |
||||
padding: '16px', |
||||
backgroundColor: `${theme.colors.background.secondary}`, |
||||
borderRadius: `8px`, |
||||
borderBottomLeftRadius: 0, |
||||
}), |
||||
infoContainerWrapper: css({ |
||||
paddingBottom: '24px', |
||||
}), |
||||
metricTable: css({ |
||||
width: '100%', |
||||
}), |
||||
metricTableName: css({ |
||||
width: '15%', |
||||
}), |
||||
metricTableValue: css({ |
||||
fontFamily: `${theme.typography.fontFamilyMonospace}`, |
||||
fontSize: `${theme.typography.bodySmall.fontSize}`, |
||||
overflow: 'scroll', |
||||
textWrap: 'nowrap', |
||||
maxWidth: '150px', |
||||
width: '60%', |
||||
maskImage: `linear-gradient(to right, rgba(0, 0, 0, 1) 90%, rgba(0, 0, 0, 0))`, |
||||
}), |
||||
metricTableButton: css({ |
||||
float: 'right', |
||||
}), |
||||
queryQuestion: css({ |
||||
textAlign: 'end', |
||||
padding: '8px 0', |
||||
}), |
||||
secondaryText: css({ |
||||
color: `${theme.colors.text.secondary}`, |
||||
}), |
||||
loadingMessageContainer: css({ |
||||
border: `${theme.colors.border.strong}`, |
||||
padding: `16px`, |
||||
backgroundColor: `${theme.colors.background.secondary}`, |
||||
marginBottom: `20px`, |
||||
borderRadius: `8px`, |
||||
color: `${theme.colors.text.secondary}`, |
||||
fontStyle: 'italic', |
||||
}), |
||||
floatRight: css({ |
||||
float: 'right', |
||||
}), |
||||
codeText: css({ |
||||
fontFamily: `${theme.typography.fontFamilyMonospace}`, |
||||
fontSize: `${theme.typography.bodySmall.fontSize}`, |
||||
}), |
||||
bodySmall: css({ |
||||
fontSize: `${theme.typography.bodySmall.fontSize}`, |
||||
}), |
||||
explainPadding: css({ |
||||
paddingLeft: '26px', |
||||
}), |
||||
bottomMargin: css({ |
||||
marginBottom: '20px', |
||||
}), |
||||
topPadding: css({ |
||||
paddingTop: '22px', |
||||
}), |
||||
doc: css({ |
||||
textDecoration: 'underline', |
||||
}), |
||||
afterButtons: css({ |
||||
display: 'flex', |
||||
justifyContent: 'flex-end', |
||||
}), |
||||
feedbackStyle: css({ |
||||
margin: 0, |
||||
textAlign: 'right', |
||||
paddingTop: '22px', |
||||
paddingBottom: '22px', |
||||
}), |
||||
nextInteractionHeight: css({ |
||||
height: '88px', |
||||
}), |
||||
center: css({ |
||||
display: 'flex', |
||||
alignItems: 'center', |
||||
justifyContent: 'center', |
||||
}), |
||||
inputPadding: css({ |
||||
paddingBottom: '24px', |
||||
}), |
||||
querySuggestion: css({ |
||||
display: 'flex', |
||||
flexWrap: 'nowrap', |
||||
}), |
||||
longCode: css({ |
||||
width: '90%', |
||||
textWrap: 'nowrap', |
||||
overflow: 'scroll', |
||||
maskImage: `linear-gradient(to right, rgba(0, 0, 0, 1) 90%, rgba(0, 0, 0, 0))`, |
||||
|
||||
div: { |
||||
display: 'inline-block', |
||||
}, |
||||
}), |
||||
useButton: css({ |
||||
marginLeft: 'auto', |
||||
}), |
||||
suggestionFeedback: css({ |
||||
textAlign: 'left', |
||||
}), |
||||
feedbackQuestion: css({ |
||||
display: 'flex', |
||||
padding: '8px 0px', |
||||
h6: { marginBottom: 0 }, |
||||
i: { |
||||
marginTop: '1px', |
||||
}, |
||||
}), |
||||
explationTextInput: css({ |
||||
paddingLeft: '24px', |
||||
}), |
||||
submitFeedback: css({ |
||||
padding: '16px 0', |
||||
}), |
||||
noMargin: css({ |
||||
margin: 0, |
||||
}), |
||||
enableButtonTooltip: css({ |
||||
padding: 8, |
||||
}), |
||||
enableButtonTooltipText: css({ |
||||
color: `${theme.colors.text.secondary}`, |
||||
ul: { |
||||
marginLeft: 16, |
||||
}, |
||||
}), |
||||
link: css({ |
||||
color: `${theme.colors.text.link} !important`, |
||||
}), |
||||
}; |
||||
}; |
||||
|
||||
// data-testid values shared by the Query Advisor components and their tests.
export const queryAssistanttestIds = {
  promQail: 'prom-qail',
  // "Continue" button that dismisses the starting/security message
  securityInfoButton: 'security-info-button',
  // "No" answer — fetches historical suggestions immediately
  clickForHistorical: 'click-for-historical',
  // "Yes" answer — opens the AI prompt flow
  clickForAi: 'click-for-ai',
  // prompt "Submit" button; used with the interaction index appended
  submitPrompt: 'submit-prompt',
  // follow-up button in the suggestion container
  refinePrompt: 'refine-prompt',
};
||||
|
||||
const stateSlice = createSlice({ |
||||
name: 'metrics-modal-state', |
||||
initialState: initialState(), |
||||
reducers: { |
||||
showExplainer: (state, action: PayloadAction<boolean>) => { |
||||
state.showExplainer = action.payload; |
||||
}, |
||||
showStartingMessage: (state, action: PayloadAction<boolean>) => { |
||||
state.showStartingMessage = action.payload; |
||||
}, |
||||
indicateCheckbox: (state, action: PayloadAction<boolean>) => { |
||||
state.indicateCheckbox = action.payload; |
||||
}, |
||||
askForQueryHelp: (state, action: PayloadAction<boolean>) => { |
||||
state.askForQueryHelp = action.payload; |
||||
}, |
||||
/* |
||||
* start working on a collection of interactions |
||||
* { |
||||
* askForhelp y n |
||||
* prompt question |
||||
* queries querySuggestions |
||||
* } |
||||
* |
||||
*/ |
||||
addInteraction: (state, action: PayloadAction<{ suggestionType: SuggestionType; isLoading: boolean }>) => { |
||||
// AI or Historical?
|
||||
const interaction = createInteraction(action.payload.suggestionType, action.payload.isLoading); |
||||
const interactions = state.interactions; |
||||
state.interactions = interactions.concat([interaction]); |
||||
}, |
||||
updateInteraction: (state, action: PayloadAction<{ idx: number; interaction: Interaction }>) => { |
||||
// update the interaction by index
|
||||
// will most likely be the last interaction but we might update previous by giving them cues of helpful or not
|
||||
const index = action.payload.idx; |
||||
const updInteraction = action.payload.interaction; |
||||
|
||||
state.interactions = state.interactions.map((interaction: Interaction, idx: number) => { |
||||
if (idx === index) { |
||||
return updInteraction; |
||||
} |
||||
|
||||
return interaction; |
||||
}); |
||||
}, |
||||
}, |
||||
}); |
||||
|
||||
// actions to update the state
|
||||
export const { showStartingMessage, indicateCheckbox, addInteraction, updateInteraction } = stateSlice.actions; |
@ -1,51 +0,0 @@ |
||||
// Core Grafana history https://github.com/grafana/grafana/blob/v11.0.0-preview/public/app/plugins/datasource/prometheus/querybuilder/components/promQail/QueryAssistantButton.test.tsx
|
||||
import { fireEvent, render, screen, waitFor } from '@testing-library/react'; |
||||
import userEvent from '@testing-library/user-event'; |
||||
|
||||
import { QueryAssistantButton } from './QueryAssistantButton'; |
||||
|
||||
// Shared drawer-open spy; assertions only check that it was called.
const setShowDrawer = jest.fn(() => {});

describe('QueryAssistantButton', () => {
  it('renders the button', async () => {
    render(<QueryAssistantButton {...createProps(true, 'metric', setShowDrawer)} />);
    expect(screen.getByText('Get query suggestions')).toBeInTheDocument();
  });

  it('shows the LLM app disabled message when LLM app is not set up with vector DB', async () => {
    render(<QueryAssistantButton {...createProps(false, 'metric', setShowDrawer)} />);
    await userEvent.hover(screen.getByText('Get query suggestions'));
    expect(await screen.findByText('Install and enable the LLM plugin')).toBeInTheDocument();
  });

  it('shows the message to select a metric when LLM is enabled and no metric is selected', async () => {
    render(<QueryAssistantButton {...createProps(true, '', setShowDrawer)} />);
    await userEvent.hover(screen.getByText('Get query suggestions'));
    expect(await screen.findByText('First, select a metric.')).toBeInTheDocument();
  });

  it('calls setShowDrawer when button is clicked', async () => {
    render(<QueryAssistantButton {...createProps(true, 'metric', setShowDrawer)} />);
    fireEvent.click(screen.getByText('Get query suggestions'));
    expect(setShowDrawer).toHaveBeenCalled();
  });
});

// Builds component props for a given LLM-app state and selected metric.
function createProps(llmAppEnabled: boolean, metric: string, setShowDrawer: () => void) {
  return {
    llmAppEnabled,
    metric,
    setShowDrawer,
  };
}
@ -1,86 +0,0 @@ |
||||
// Core Grafana history https://github.com/grafana/grafana/blob/v11.0.0-preview/public/app/plugins/datasource/prometheus/querybuilder/components/promQail/QueryAssistantButton.tsx
|
||||
import { selectors } from '@grafana/e2e-selectors'; |
||||
import { reportInteraction } from '@grafana/runtime'; |
||||
import { Button, Tooltip, useTheme2 } from '@grafana/ui'; |
||||
|
||||
import { getStyles } from './PromQail'; |
||||
import AI_Logo_color from './resources/AI_Logo_color.svg'; |
||||
|
||||
export type Props = { |
||||
llmAppEnabled: boolean; |
||||
metric: string; |
||||
setShowDrawer: (show: boolean) => void; |
||||
}; |
||||
|
||||
export function QueryAssistantButton(props: Props) { |
||||
const { llmAppEnabled, metric, setShowDrawer } = props; |
||||
|
||||
const llmAppDisabled = !llmAppEnabled; |
||||
const noMetricSelected = !metric; |
||||
|
||||
const theme = useTheme2(); |
||||
const styles = getStyles(theme); |
||||
|
||||
const button = () => { |
||||
return ( |
||||
<Button |
||||
variant={'secondary'} |
||||
onClick={() => { |
||||
reportInteraction('grafana_prometheus_promqail_ai_button_clicked', { |
||||
metric: metric, |
||||
}); |
||||
setShowDrawer(true); |
||||
}} |
||||
disabled={!metric || !llmAppEnabled} |
||||
data-testid={selectors.components.DataSource.Prometheus.queryEditor.builder.queryAdvisor} |
||||
> |
||||
<img height={16} src={AI_Logo_color} alt="AI logo black and white" /> |
||||
{'\u00A0'}Get query suggestions |
||||
</Button> |
||||
); |
||||
}; |
||||
|
||||
const selectMetricMessage = ( |
||||
<Tooltip content={'First, select a metric.'} placement={'bottom-end'}> |
||||
{button()} |
||||
</Tooltip> |
||||
); |
||||
|
||||
const llmAppMessage = ( |
||||
<Tooltip |
||||
interactive={true} |
||||
placement={'auto-end'} |
||||
content={ |
||||
<div className={styles.enableButtonTooltip}> |
||||
<h6>Query Advisor is disabled</h6> |
||||
<div className={styles.enableButtonTooltipText}>To enable Query Advisor you must:</div> |
||||
<div className={styles.enableButtonTooltipText}> |
||||
<ul> |
||||
<li> |
||||
<a |
||||
href={'https://grafana.com/docs/grafana-cloud/alerting-and-irm/machine-learning/llm-plugin/'} |
||||
target="_blank" |
||||
rel="noreferrer noopener" |
||||
className={styles.link} |
||||
> |
||||
Install and enable the LLM plugin |
||||
</a> |
||||
</li> |
||||
<li>Select a metric</li> |
||||
</ul> |
||||
</div> |
||||
</div> |
||||
} |
||||
> |
||||
{button()} |
||||
</Tooltip> |
||||
); |
||||
|
||||
if (llmAppDisabled) { |
||||
return llmAppMessage; |
||||
} else if (noMetricSelected) { |
||||
return selectMetricMessage; |
||||
} else { |
||||
return button(); |
||||
} |
||||
} |
@ -1,102 +0,0 @@ |
||||
// Core Grafana history https://github.com/grafana/grafana/blob/v11.0.0-preview/public/app/plugins/datasource/prometheus/querybuilder/components/promQail/QuerySuggestionContainer.tsx
|
||||
import { cx } from '@emotion/css'; |
||||
import { useState } from 'react'; |
||||
|
||||
import { Button, useTheme2 } from '@grafana/ui'; |
||||
|
||||
import { PromVisualQuery } from '../../types'; |
||||
|
||||
import { getStyles, queryAssistanttestIds } from './PromQail'; |
||||
import { QuerySuggestionItem } from './QuerySuggestionItem'; |
||||
import { QuerySuggestion, SuggestionType } from './types'; |
||||
|
||||
// Props for QuerySuggestionContainer: renders a batch of suggestions and
// hands user actions back to the PromQail drawer.
export type Props = {
  // suggestions produced for the current interaction
  querySuggestions: QuerySuggestion[];
  // AI (prompt-driven) vs Historical — changes the heading copy
  suggestionType: SuggestionType;
  // closes the whole drawer
  closeDrawer: () => void;
  // starts a follow-up interaction
  nextInteraction: () => void;
  // requests/returns an explanation for the suggestion at idx
  queryExplain: (idx: number) => void;
  // applies a suggestion to the editor's visual query
  onChange: (query: PromVisualQuery) => void;
  // prompt text the user entered (empty for historical suggestions)
  prompt: string;
};
||||
|
||||
export function QuerySuggestionContainer(props: Props) { |
||||
const { suggestionType, querySuggestions, closeDrawer, nextInteraction, queryExplain, onChange, prompt } = props; |
||||
|
||||
const [hasNextInteraction, updateHasNextInteraction] = useState<boolean>(false); |
||||
|
||||
const theme = useTheme2(); |
||||
const styles = getStyles(theme); |
||||
|
||||
let text, secondaryText, refineText; |
||||
|
||||
if (suggestionType === SuggestionType.Historical) { |
||||
text = `Here are ${querySuggestions.length} query suggestions:`; |
||||
refineText = 'I want to write a prompt'; |
||||
} else if (suggestionType === SuggestionType.AI) { |
||||
text = text = 'Here is your query suggestion:'; |
||||
secondaryText = |
||||
'This query is based off of natural language descriptions of the most commonly used PromQL queries.'; |
||||
refineText = 'Refine prompt'; |
||||
} |
||||
|
||||
return ( |
||||
<> |
||||
{suggestionType === SuggestionType.Historical ? ( |
||||
<div className={styles.bottomMargin}>{text}</div> |
||||
) : ( |
||||
<> |
||||
<div className={styles.textPadding}>{text}</div> |
||||
<div className={cx(styles.secondaryText, styles.bottomMargin)}>{secondaryText}</div> |
||||
</> |
||||
)} |
||||
|
||||
<div className={styles.infoContainerWrapper}> |
||||
<div className={styles.infoContainer}> |
||||
{querySuggestions.map((qs: QuerySuggestion, idx: number) => { |
||||
return ( |
||||
<QuerySuggestionItem |
||||
historical={suggestionType === SuggestionType.Historical} |
||||
querySuggestion={qs} |
||||
key={idx} |
||||
order={idx + 1} |
||||
queryExplain={queryExplain} |
||||
onChange={onChange} |
||||
closeDrawer={closeDrawer} |
||||
last={idx === querySuggestions.length - 1} |
||||
// for feedback rudderstack events
|
||||
allSuggestions={querySuggestions.reduce((acc: string, qs: QuerySuggestion) => { |
||||
return acc + '$$' + qs.query; |
||||
}, '')} |
||||
prompt={prompt ?? ''} |
||||
/> |
||||
); |
||||
})} |
||||
</div> |
||||
</div> |
||||
{!hasNextInteraction && ( |
||||
<div className={styles.nextInteractionHeight}> |
||||
<div className={cx(styles.afterButtons, styles.textPadding)}> |
||||
<Button |
||||
onClick={() => { |
||||
updateHasNextInteraction(true); |
||||
nextInteraction(); |
||||
}} |
||||
data-testid={queryAssistanttestIds.refinePrompt} |
||||
fill="outline" |
||||
variant="secondary" |
||||
size="md" |
||||
> |
||||
{refineText} |
||||
</Button> |
||||
</div> |
||||
<div className={cx(styles.textPadding, styles.floatRight)}> |
||||
<Button fill="outline" variant="secondary" size="md" onClick={closeDrawer}> |
||||
Cancel |
||||
</Button> |
||||
</div> |
||||
</div> |
||||
)} |
||||
</> |
||||
); |
||||
} |
@ -1,322 +0,0 @@ |
||||
// Core Grafana history https://github.com/grafana/grafana/blob/v11.0.0-preview/public/app/plugins/datasource/prometheus/querybuilder/components/promQail/QuerySuggestionItem.tsx
|
||||
import { cx } from '@emotion/css'; |
||||
import { FormEvent, useState } from 'react'; |
||||
|
||||
import { SelectableValue } from '@grafana/data'; |
||||
import { reportInteraction } from '@grafana/runtime'; |
||||
import { Button, RadioButtonList, Spinner, TextArea, Toggletip, useTheme2 } from '@grafana/ui'; |
||||
|
||||
import { buildVisualQueryFromString } from '../../parsing'; |
||||
import { PromVisualQuery } from '../../types'; |
||||
|
||||
import { getStyles } from './PromQail'; |
||||
import { QuerySuggestion } from './types'; |
||||
|
||||
export type Props = { |
||||
querySuggestion: QuerySuggestion; |
||||
order: number; |
||||
queryExplain: (idx: number) => void; |
||||
historical: boolean; |
||||
onChange: (query: PromVisualQuery) => void; |
||||
closeDrawer: () => void; |
||||
last: boolean; |
||||
prompt: string; |
||||
allSuggestions: string | undefined; |
||||
}; |
||||
|
||||
const suggestionOptions: SelectableValue[] = [ |
||||
{ label: 'Yes', value: 'yes' }, |
||||
{ label: 'No', value: 'no' }, |
||||
]; |
||||
const explationOptions: SelectableValue[] = [ |
||||
{ label: 'Too vague', value: 'too vague' }, |
||||
{ label: 'Too technical', value: 'too technical' }, |
||||
{ label: 'Inaccurate', value: 'inaccurate' }, |
||||
{ label: 'Other', value: 'other' }, |
||||
]; |
||||
|
||||
export function QuerySuggestionItem(props: Props) { |
||||
const { querySuggestion, order, queryExplain, historical, onChange, closeDrawer, last, allSuggestions, prompt } = |
||||
props; |
||||
const [showExp, updShowExp] = useState<boolean>(false); |
||||
|
||||
const [gaveExplanationFeedback, updateGaveExplanationFeedback] = useState<boolean>(false); |
||||
const [gaveSuggestionFeedback, updateGaveSuggestionFeedback] = useState<boolean>(false); |
||||
|
||||
const [suggestionFeedback, setSuggestionFeedback] = useState({ |
||||
radioInput: '', |
||||
text: '', |
||||
}); |
||||
|
||||
const [explanationFeedback, setExplanationFeedback] = useState({ |
||||
radioInput: '', |
||||
text: '', |
||||
}); |
||||
|
||||
const theme = useTheme2(); |
||||
const styles = getStyles(theme); |
||||
|
||||
const { query, explanation } = querySuggestion; |
||||
|
||||
const feedbackToggleTip = (type: string) => { |
||||
const updateRadioFeedback = (value: string) => { |
||||
if (type === 'explanation') { |
||||
setExplanationFeedback({ |
||||
...explanationFeedback, |
||||
radioInput: value, |
||||
}); |
||||
} else { |
||||
setSuggestionFeedback({ |
||||
...suggestionFeedback, |
||||
radioInput: value, |
||||
}); |
||||
} |
||||
}; |
||||
|
||||
const updateTextFeedback = (e: FormEvent<HTMLTextAreaElement>) => { |
||||
if (type === 'explanation') { |
||||
setExplanationFeedback({ |
||||
...explanationFeedback, |
||||
text: e.currentTarget.value, |
||||
}); |
||||
} else { |
||||
setSuggestionFeedback({ |
||||
...suggestionFeedback, |
||||
text: e.currentTarget.value, |
||||
}); |
||||
} |
||||
}; |
||||
|
||||
const disabledButton = () => |
||||
type === 'explanation' ? !explanationFeedback.radioInput : !suggestionFeedback.radioInput; |
||||
|
||||
const questionOne = |
||||
type === 'explanation' ? 'Why was the explanation not helpful?' : 'Were the query suggestions helpful?'; |
||||
|
||||
return ( |
||||
<div className={styles.suggestionFeedback}> |
||||
<div> |
||||
<div className={styles.feedbackQuestion}> |
||||
<h6>{questionOne}</h6> |
||||
<i>(Required)</i> |
||||
</div> |
||||
<RadioButtonList |
||||
name="default" |
||||
options={type === 'explanation' ? explationOptions : suggestionOptions} |
||||
value={type === 'explanation' ? explanationFeedback.radioInput : suggestionFeedback.radioInput} |
||||
onChange={updateRadioFeedback} |
||||
/> |
||||
</div> |
||||
<div className={cx(type === 'explanation' && styles.explationTextInput)}> |
||||
{type !== 'explanation' && ( |
||||
<div className={styles.feedbackQuestion}> |
||||
<h6>How can we improve the query suggestions?</h6> |
||||
</div> |
||||
)} |
||||
<TextArea |
||||
type="text" |
||||
aria-label="Promqail suggestion text" |
||||
placeholder="Enter your feedback" |
||||
value={type === 'explanation' ? explanationFeedback.text : suggestionFeedback.text} |
||||
onChange={updateTextFeedback} |
||||
cols={100} |
||||
/> |
||||
</div> |
||||
|
||||
<div className={styles.submitFeedback}> |
||||
<Button |
||||
variant="primary" |
||||
size="sm" |
||||
disabled={disabledButton()} |
||||
onClick={() => { |
||||
// submit the rudderstack event
|
||||
if (type === 'explanation') { |
||||
explanationFeedbackEvent( |
||||
explanationFeedback.radioInput, |
||||
explanationFeedback.text, |
||||
querySuggestion, |
||||
historical, |
||||
prompt |
||||
); |
||||
updateGaveExplanationFeedback(true); |
||||
} else { |
||||
suggestionFeedbackEvent( |
||||
suggestionFeedback.radioInput, |
||||
suggestionFeedback.text, |
||||
allSuggestions ?? '', |
||||
historical, |
||||
prompt |
||||
); |
||||
updateGaveSuggestionFeedback(true); |
||||
} |
||||
}} |
||||
> |
||||
Submit |
||||
</Button> |
||||
</div> |
||||
</div> |
||||
); |
||||
}; |
||||
|
||||
return ( |
||||
<> |
||||
<div className={styles.querySuggestion}> |
||||
<div title={query} className={cx(styles.codeText, styles.longCode)}> |
||||
{`${order}. ${query}`} |
||||
</div> |
||||
<div className={styles.useButton}> |
||||
<Button |
||||
variant="primary" |
||||
size="sm" |
||||
onClick={() => { |
||||
reportInteraction('grafana_prometheus_promqail_use_query_button_clicked', { |
||||
query: querySuggestion.query, |
||||
}); |
||||
const pvq = buildVisualQueryFromString(querySuggestion.query); |
||||
// check for errors!
|
||||
onChange(pvq.query); |
||||
closeDrawer(); |
||||
}} |
||||
> |
||||
Use |
||||
</Button> |
||||
</div> |
||||
</div> |
||||
<div> |
||||
<Button |
||||
fill="text" |
||||
variant="secondary" |
||||
icon={showExp ? 'angle-up' : 'angle-down'} |
||||
onClick={() => { |
||||
updShowExp(!showExp); |
||||
queryExplain(order - 1); |
||||
}} |
||||
className={cx(styles.bodySmall)} |
||||
size="sm" |
||||
> |
||||
Explainer |
||||
</Button> |
||||
{!showExp && order !== 5 && <div className={styles.textPadding}></div>} |
||||
|
||||
{showExp && !querySuggestion.explanation && ( |
||||
<div className={styles.center}> |
||||
<Spinner /> |
||||
</div> |
||||
)} |
||||
{showExp && querySuggestion.explanation && ( |
||||
<> |
||||
<div className={cx(styles.bodySmall, styles.explainPadding)}> |
||||
<div className={styles.textPadding}>This query is trying to answer the question:</div> |
||||
<div className={styles.textPadding}>{explanation}</div> |
||||
<div className={styles.textPadding}> |
||||
Learn more with this{' '} |
||||
<a |
||||
className={styles.doc} |
||||
href={'https://prometheus.io/docs/prometheus/latest/querying/examples/#query-examples'} |
||||
target="_blank" |
||||
rel="noopener noreferrer" |
||||
> |
||||
Prometheus doc |
||||
</a> |
||||
</div> |
||||
|
||||
<div className={cx(styles.rightButtons, styles.secondaryText)}> |
||||
Was this explanation helpful? |
||||
<div className={styles.floatRight}> |
||||
{!gaveExplanationFeedback ? ( |
||||
<> |
||||
<Button |
||||
fill="outline" |
||||
variant="secondary" |
||||
size="sm" |
||||
className={styles.leftButton} |
||||
onClick={() => { |
||||
explanationFeedbackEvent('Yes', '', querySuggestion, historical, prompt); |
||||
updateGaveExplanationFeedback(true); |
||||
}} |
||||
> |
||||
Yes |
||||
</Button> |
||||
<Toggletip |
||||
aria-label="Suggestion feedback" |
||||
content={feedbackToggleTip('explanation')} |
||||
placement="bottom-end" |
||||
closeButton={true} |
||||
> |
||||
<Button fill="outline" variant="secondary" size="sm"> |
||||
No |
||||
</Button> |
||||
</Toggletip> |
||||
</> |
||||
) : ( |
||||
'Thank you for your feedback!' |
||||
)} |
||||
</div> |
||||
</div> |
||||
</div> |
||||
|
||||
{!last && <hr />} |
||||
</> |
||||
)} |
||||
{last && ( |
||||
<div className={cx(styles.feedbackStyle)}> |
||||
{!gaveSuggestionFeedback ? ( |
||||
<Toggletip |
||||
aria-label="Suggestion feedback" |
||||
content={feedbackToggleTip('suggestion')} |
||||
placement="bottom-end" |
||||
closeButton={true} |
||||
> |
||||
<Button fill="outline" variant="secondary" size="sm"> |
||||
Give feedback on suggestions |
||||
</Button> |
||||
</Toggletip> |
||||
) : ( |
||||
// do this weird thing because the toggle tip doesn't allow an extra close function
|
||||
<Button fill="outline" variant="secondary" size="sm" disabled={true}> |
||||
Thank you for your feedback! |
||||
</Button> |
||||
)} |
||||
</div> |
||||
)} |
||||
</div> |
||||
</> |
||||
); |
||||
} |
||||
|
||||
function explanationFeedbackEvent( |
||||
radioInputFeedback: string, |
||||
textFeedback: string, |
||||
querySuggestion: QuerySuggestion, |
||||
historical: boolean, |
||||
prompt: string |
||||
) { |
||||
const event = 'grafana_prometheus_promqail_explanation_feedback'; |
||||
|
||||
reportInteraction(event, { |
||||
helpful: radioInputFeedback, |
||||
textFeedback: textFeedback, |
||||
suggestionType: historical ? 'historical' : 'AI', |
||||
query: querySuggestion.query, |
||||
explanation: querySuggestion.explanation, |
||||
prompt: prompt, |
||||
}); |
||||
} |
||||
|
||||
function suggestionFeedbackEvent( |
||||
radioInputFeedback: string, |
||||
textFeedback: string, |
||||
allSuggestions: string, |
||||
historical: boolean, |
||||
prompt: string |
||||
) { |
||||
const event = 'grafana_prometheus_promqail_suggestion_feedback'; |
||||
|
||||
reportInteraction(event, { |
||||
helpful: radioInputFeedback, |
||||
textFeedback: textFeedback, |
||||
suggestionType: historical ? 'historical' : 'AI', |
||||
allSuggestions: allSuggestions, |
||||
prompt: prompt, |
||||
}); |
||||
} |
@ -1 +0,0 @@ |
||||
export * from './PromQail'; |
@ -1,115 +0,0 @@ |
||||
// Core Grafana history https://github.com/grafana/grafana/blob/v11.0.0-preview/public/app/plugins/datasource/prometheus/querybuilder/components/promQail/prompts.ts
|
||||
export const ExplainSystemPrompt = `You are an expert in Prometheus, the event monitoring and alerting application.
|
||||
|
||||
You are given relevant PromQL documentation, a type and description for a Prometheus metric, and a PromQL query on that metric. Using the provided information for reference, please explain what the output of a given query is in 1 sentences. Do not walk through what the functions do separately, make your answer concise.
|
||||
|
||||
Input will be in the form: |
||||
|
||||
|
||||
PromQL Documentation: |
||||
<PromQL documentation> |
||||
|
||||
PromQL Metrics Metadata: |
||||
<metric_name>(<metric type of the metric queried>): <description of what the metric means> |
||||
|
||||
PromQL Expression:
|
||||
<PromQL query> |
||||
|
||||
Examples of input and output |
||||
---------- |
||||
PromQL Documentation: |
||||
A counter is a cumulative metric that represents a single monotonically increasing counter whose value can only increase or be reset to zero on restart. For example, you can use a counter to represent the number of requests served, tasks completed, or errors. |
||||
topk (largest k elements by sample value) |
||||
sum (calculate sum over dimensions) |
||||
rate(v range-vector) calculates the per-second average rate of increase of the time series in the range vector. Breaks in monotonicity (such as counter resets due to target restarts) are automatically adjusted for.
|
||||
|
||||
PromQL Metrics Metadata: |
||||
traces_exporter_sent_spans(counter): Number of spans successfully sent to destination. |
||||
|
||||
PromQL Expression: |
||||
topk(3, sum by(cluster) (rate(traces_exporter_sent_spans{exporter="otlp"}[5m]))) |
||||
|
||||
This query is trying to answer the question: |
||||
What is the top 3 clusters that have successfully sent the most number of spans to the destination? |
||||
`;
|
||||
|
||||
export type ExplainUserPromptParams = { |
||||
documentation: string; |
||||
metricName: string; |
||||
metricType: string; |
||||
metricMetadata: string; |
||||
query: string; |
||||
}; |
||||
|
||||
export function GetExplainUserPrompt({ |
||||
documentation, |
||||
metricName, |
||||
metricType, |
||||
metricMetadata, |
||||
query, |
||||
}: ExplainUserPromptParams): string { |
||||
if (documentation === '') { |
||||
documentation = 'No documentation provided.'; |
||||
} |
||||
if (metricMetadata === '') { |
||||
metricMetadata = 'No description provided.'; |
||||
} |
||||
return ` |
||||
PromQL Documentation:
|
||||
${documentation} |
||||
|
||||
PromQL Metrics Metadata: |
||||
${metricName}(${metricType}): ${metricMetadata} |
||||
|
||||
PromQL Expression:
|
||||
${query} |
||||
|
||||
This query is trying to answer the question: |
||||
`;
|
||||
} |
||||
|
||||
export const SuggestSystemPrompt = `You are a Prometheus Query Language (PromQL) expert assistant inside Grafana.
|
||||
When the user asks a question, respond with a valid PromQL query and only the query. |
||||
|
||||
To help you answer the question, you will receive: |
||||
- List of potentially relevant PromQL templates with descriptions, ranked by semantic search score |
||||
- Prometheus metric |
||||
- Metric type |
||||
- Available Prometheus metric labels |
||||
- User question |
||||
|
||||
Policy: |
||||
- Do not invent labels names, you can only use the available labels |
||||
- For rate queries, use the $__rate_interval variable`;
|
||||
|
||||
// rewrite with a type
|
||||
export type SuggestUserPromptParams = { |
||||
promql: string; |
||||
question: string; |
||||
metricType: string; |
||||
labels: string; |
||||
templates: string; |
||||
}; |
||||
|
||||
export function GetSuggestUserPrompt({ |
||||
promql, |
||||
question, |
||||
metricType, |
||||
labels, |
||||
templates, |
||||
}: SuggestUserPromptParams): string { |
||||
if (templates === '') { |
||||
templates = 'No templates provided.'; |
||||
} else { |
||||
templates = templates.replace(/\n/g, '\n '); |
||||
} |
||||
return `Relevant PromQL templates:
|
||||
${templates} |
||||
|
||||
Prometheus metric: ${promql} |
||||
Metric type: ${metricType} |
||||
Available Prometheus metric labels: ${labels} |
||||
User question: ${question} |
||||
|
||||
\`\`\`promql`; |
||||
} |
Before Width: | Height: | Size: 1.2 KiB |
Before Width: | Height: | Size: 1.5 KiB |
@ -1,73 +0,0 @@ |
||||
// Core Grafana history https://github.com/grafana/grafana/blob/v11.0.0-preview/public/app/plugins/datasource/prometheus/querybuilder/components/promQail/state/helpers.test.ts
|
||||
import { openai, vector } from '@grafana/llm'; |
||||
|
||||
import { guessMetricType, isLLMPluginEnabled } from './helpers'; |
||||
|
||||
// Mock the grafana llms module
|
||||
jest.mock('@grafana/llm', () => ({ |
||||
openai: { |
||||
health: jest.fn(), |
||||
}, |
||||
vector: { |
||||
health: jest.fn(), |
||||
}, |
||||
})); |
||||
|
||||
describe('isLLMPluginEnabled', () => { |
||||
it('should return true if LLM plugin is enabled', async () => { |
||||
jest.mocked(openai.health).mockResolvedValue({ ok: true, configured: true }); |
||||
jest.mocked(vector.health).mockResolvedValue({ ok: true, enabled: true }); |
||||
|
||||
const enabled = await isLLMPluginEnabled(); |
||||
|
||||
expect(enabled).toBe(true); |
||||
}); |
||||
|
||||
it('should return false if LLM plugin is not enabled', async () => { |
||||
jest.mocked(openai.health).mockResolvedValue({ ok: false, configured: false }); |
||||
jest.mocked(vector.health).mockResolvedValue({ ok: false, enabled: false }); |
||||
|
||||
const enabled = await isLLMPluginEnabled(); |
||||
|
||||
expect(enabled).toBe(false); |
||||
}); |
||||
|
||||
it('should return false if LLM plugin is enabled but health check fails', async () => { |
||||
jest.mocked(openai.health).mockResolvedValue({ ok: false, configured: true }); |
||||
jest.mocked(vector.health).mockResolvedValue({ ok: false, enabled: true }); |
||||
|
||||
const enabled = await isLLMPluginEnabled(); |
||||
|
||||
expect(enabled).toBe(false); |
||||
}); |
||||
}); |
||||
|
||||
const metricListWithType = [ |
||||
// below is summary metric family
|
||||
['go_gc_duration_seconds', 'summary'], |
||||
['go_gc_duration_seconds_count', 'summary'], |
||||
['go_gc_duration_seconds_sum', 'summary'], |
||||
// below is histogram metric family
|
||||
['go_gc_heap_allocs_by_size_bytes_total_bucket', 'histogram'], |
||||
['go_gc_heap_allocs_by_size_bytes_total_count', 'histogram'], |
||||
['go_gc_heap_allocs_by_size_bytes_total_sum', 'histogram'], |
||||
// below are counters
|
||||
['go_gc_heap_allocs_bytes_total', 'counter'], |
||||
['scrape_samples_post_metric_relabeling', 'counter'], |
||||
// below are gauges
|
||||
['go_gc_heap_goal_bytes', 'gauge'], |
||||
['nounderscorename', 'gauge'], |
||||
// below is both a histogram & summary
|
||||
['alertmanager_http_response_size_bytes', 'histogram,summary'], |
||||
['alertmanager_http_response_size_bytes_bucket', 'histogram,summary'], |
||||
['alertmanager_http_response_size_bytes_count', 'histogram,summary'], |
||||
['alertmanager_http_response_size_bytes_sum', 'histogram,summary'], |
||||
]; |
||||
|
||||
const metricList = metricListWithType.map((item) => item[0]); |
||||
|
||||
describe('guessMetricType', () => { |
||||
it.each(metricListWithType)("where input is '%s'", (metric: string, metricType: string) => { |
||||
expect(guessMetricType(metric, metricList)).toBe(metricType); |
||||
}); |
||||
}); |
@ -1,415 +0,0 @@ |
||||
// Core Grafana history https://github.com/grafana/grafana/blob/v11.0.0-preview/public/app/plugins/datasource/prometheus/querybuilder/components/promQail/state/helpers.ts
|
||||
import { AnyAction } from 'redux'; |
||||
|
||||
import { openai, vector } from '@grafana/llm'; |
||||
import { reportInteraction } from '@grafana/runtime'; |
||||
|
||||
import { PrometheusDatasource } from '../../../../datasource'; |
||||
import { getMetadataHelp, getMetadataType } from '../../../../language_provider'; |
||||
import { promQueryModeller } from '../../../PromQueryModeller'; |
||||
import { buildVisualQueryFromString } from '../../../parsing'; |
||||
import { PromVisualQuery } from '../../../types'; |
||||
import { updateInteraction } from '../PromQail'; |
||||
import { |
||||
ExplainSystemPrompt, |
||||
GetExplainUserPrompt, |
||||
SuggestSystemPrompt, |
||||
GetSuggestUserPrompt, |
||||
SuggestUserPromptParams, |
||||
} from '../prompts'; |
||||
import { Interaction, QuerySuggestion, SuggestionType } from '../types'; |
||||
|
||||
import { createInteraction } from './state'; |
||||
import { getTemplateSuggestions } from './templates'; |
||||
|
||||
const OPENAI_MODEL_NAME = 'gpt-3.5-turbo-1106'; |
||||
const promQLTemplatesCollection = 'grafana.promql.templates'; |
||||
|
||||
interface TemplateSearchResult { |
||||
description: string | null; |
||||
metric_type: string | null; |
||||
promql: string | null; |
||||
} |
||||
|
||||
export function getExplainMessage(query: string, metric: string, datasource: PrometheusDatasource): openai.Message[] { |
||||
let metricMetadata = ''; |
||||
let metricType = ''; |
||||
|
||||
const pvq = buildVisualQueryFromString(query); |
||||
|
||||
if (datasource.languageProvider.metricsMetadata) { |
||||
metricType = getMetadataType(metric, datasource.languageProvider.metricsMetadata) ?? ''; |
||||
metricMetadata = getMetadataHelp(metric, datasource.languageProvider.metricsMetadata) ?? ''; |
||||
} |
||||
|
||||
const documentationBody = pvq.query.operations |
||||
.map((op) => { |
||||
const def = promQueryModeller.getOperationDef(op.id); |
||||
if (!def) { |
||||
return ''; |
||||
} |
||||
const title = def.renderer(op, def, '<expr>'); |
||||
const body = def.explainHandler ? def.explainHandler(op, def) : def.documentation; |
||||
|
||||
if (!body) { |
||||
return ''; |
||||
} |
||||
return `### ${title}:\n${body}`; |
||||
}) |
||||
.filter((item) => item !== '') |
||||
.join('\n'); |
||||
|
||||
return [ |
||||
{ role: 'system', content: ExplainSystemPrompt }, |
||||
{ |
||||
role: 'user', |
||||
content: GetExplainUserPrompt({ |
||||
documentation: documentationBody, |
||||
metricName: metric, |
||||
metricType: metricType, |
||||
metricMetadata: metricMetadata, |
||||
query: query, |
||||
}), |
||||
}, |
||||
]; |
||||
} |
||||
|
||||
function getSuggestMessages({ |
||||
promql, |
||||
question, |
||||
metricType, |
||||
labels, |
||||
templates, |
||||
}: SuggestUserPromptParams): openai.Message[] { |
||||
return [ |
||||
{ role: 'system', content: SuggestSystemPrompt }, |
||||
{ role: 'user', content: GetSuggestUserPrompt({ promql, question, metricType, labels, templates }) }, |
||||
]; |
||||
} |
||||
|
||||
/** |
||||
* Calls the API and adds suggestions to the interaction |
||||
* |
||||
* @param dispatch |
||||
* @param idx |
||||
* @param interaction |
||||
* @returns |
||||
*/ |
||||
export async function promQailExplain( |
||||
dispatch: React.Dispatch<AnyAction>, |
||||
idx: number, |
||||
query: PromVisualQuery, |
||||
interaction: Interaction, |
||||
suggIdx: number, |
||||
datasource: PrometheusDatasource |
||||
) { |
||||
const suggestedQuery = interaction.suggestions[suggIdx].query; |
||||
|
||||
const promptMessages = getExplainMessage(suggestedQuery, query.metric, datasource); |
||||
const interactionToUpdate = interaction; |
||||
|
||||
return openai |
||||
.streamChatCompletions({ |
||||
model: OPENAI_MODEL_NAME, |
||||
messages: promptMessages, |
||||
temperature: 0, |
||||
}) |
||||
.pipe(openai.accumulateContent()) |
||||
.subscribe((response) => { |
||||
const updatedSuggestions = interactionToUpdate.suggestions.map((sg: QuerySuggestion, sidx: number) => { |
||||
if (suggIdx === sidx) { |
||||
return { |
||||
query: interactionToUpdate.suggestions[suggIdx].query, |
||||
explanation: response, |
||||
}; |
||||
} |
||||
|
||||
return sg; |
||||
}); |
||||
|
||||
const payload = { |
||||
idx, |
||||
interaction: { |
||||
...interactionToUpdate, |
||||
suggestions: updatedSuggestions, |
||||
explanationIsLoading: false, |
||||
}, |
||||
}; |
||||
dispatch(updateInteraction(payload)); |
||||
}); |
||||
} |
||||
|
||||
/** |
||||
* Check if sublist is fully contained in the superlist |
||||
* |
||||
* @param sublist |
||||
* @param superlist |
||||
* @returns true if fully contained, else false |
||||
*/ |
||||
function isContainedIn(sublist: string[], superlist: string[]): boolean { |
||||
for (const item of sublist) { |
||||
if (!superlist.includes(item)) { |
||||
return false; |
||||
} |
||||
} |
||||
return true; |
||||
} |
||||
|
||||
/** |
||||
* Guess the type of a metric, based on its name and its relation to other metrics available |
||||
* |
||||
* @param metric - name of metric whose type to guess |
||||
* @param allMetrics - list of all available metrics |
||||
* @returns - the guess of the type (string): counter,gauge,summary,histogram,'histogram,summary' |
||||
*/ |
||||
export function guessMetricType(metric: string, allMetrics: string[]): string { |
||||
const synthetic_metrics = new Set<string>([ |
||||
'up', |
||||
'scrape_duration_seconds', |
||||
'scrape_samples_post_metric_relabeling', |
||||
'scrape_series_added', |
||||
'scrape_samples_scraped', |
||||
'ALERTS', |
||||
'ALERTS_FOR_STATE', |
||||
]); |
||||
|
||||
if (synthetic_metrics.has(metric)) { |
||||
// these are all known to be counters
|
||||
return 'counter'; |
||||
} |
||||
if (metric.startsWith(':')) { |
||||
// probably recording rule
|
||||
return 'gauge'; |
||||
} |
||||
if (metric.endsWith('_info')) { |
||||
// typically series of 1s only, the labels are the useful part. TODO: add 'info' type
|
||||
return 'counter'; |
||||
} |
||||
|
||||
if (metric.endsWith('_created') || metric.endsWith('_total')) { |
||||
// prometheus naming style recommends counters to have these suffixes.
|
||||
return 'counter'; |
||||
} |
||||
|
||||
const underscoreIndex = metric.lastIndexOf('_'); |
||||
if (underscoreIndex < 0) { |
||||
// No underscores in the name at all, very little info to go on. Guess
|
||||
return 'gauge'; |
||||
} |
||||
|
||||
// See if the suffix is histogram-y or summary-y
|
||||
const [root, suffix] = [metric.slice(0, underscoreIndex), metric.slice(underscoreIndex + 1)]; |
||||
|
||||
if (['bucket', 'count', 'sum'].includes(suffix)) { |
||||
// Might be histogram + summary
|
||||
let familyMetrics = [`${root}_bucket`, `${root}_count`, `${root}_sum`, root]; |
||||
if (isContainedIn(familyMetrics, allMetrics)) { |
||||
return 'histogram,summary'; |
||||
} |
||||
|
||||
// Might be a histogram, if so all these metrics should exist too:
|
||||
familyMetrics = [`${root}_bucket`, `${root}_count`, `${root}_sum`]; |
||||
if (isContainedIn(familyMetrics, allMetrics)) { |
||||
return 'histogram'; |
||||
} |
||||
|
||||
// Or might be a summary
|
||||
familyMetrics = [`${root}_sum`, `${root}_count`, root]; |
||||
if (isContainedIn(familyMetrics, allMetrics)) { |
||||
return 'summary'; |
||||
} |
||||
|
||||
// Otherwise it's probably just a counter!
|
||||
return 'counter'; |
||||
} |
||||
|
||||
// One case above doesn't catch: summary or histogram,summary where the non-suffixed metric is chosen
|
||||
const familyMetrics = [`${metric}_sum`, `${metric}_count`, metric]; |
||||
if (isContainedIn(familyMetrics, allMetrics)) { |
||||
if (allMetrics.includes(`${metric}_bucket`)) { |
||||
return 'histogram,summary'; |
||||
} else { |
||||
return 'summary'; |
||||
} |
||||
} |
||||
|
||||
// All else fails, guess gauge
|
||||
return 'gauge'; |
||||
} |
||||
|
||||
/** |
||||
* Generate a suitable filter structure for the VectorDB call |
||||
* @param types: list of metric types to include in the result |
||||
* @returns the structure to pass to the vectorDB call. |
||||
*/ |
||||
function generateMetricTypeFilters(types: string[]) { |
||||
return types.map((type) => ({ |
||||
metric_type: { |
||||
$eq: type, |
||||
}, |
||||
})); |
||||
} |
||||
|
||||
/** |
||||
* Taking in a metric name, try to guess its corresponding metric _family_ name |
||||
* @param metric name |
||||
* @returns metric family name |
||||
*/ |
||||
function guessMetricFamily(metric: string): string { |
||||
if (metric.endsWith('_bucket') || metric.endsWith('_count') || metric.endsWith('_sum')) { |
||||
return metric.slice(0, metric.lastIndexOf('_')); |
||||
} |
||||
return metric; |
||||
} |
||||
|
||||
/** |
||||
* Check if the LLM plugin is enabled. |
||||
* Used in the PromQueryBuilder to enable/disable the button based on openai and vector db checks |
||||
* @returns true if the LLM plugin is enabled. |
||||
*/ |
||||
export async function isLLMPluginEnabled(): Promise<boolean> { |
||||
// Check if the LLM plugin is enabled.
|
||||
// If not, we won't be able to make requests, so return early.
|
||||
const openaiEnabled = openai.health().then((response) => response.ok); |
||||
const vectorEnabled = vector.health().then((response) => response.ok); |
||||
// combine 2 promises
|
||||
return Promise.all([openaiEnabled, vectorEnabled]).then((results) => { |
||||
return results.every((result) => result); |
||||
}); |
||||
} |
||||
|
||||
/** |
||||
* Calls the API and adds suggestions to the interaction |
||||
* |
||||
* @param dispatch |
||||
* @param idx |
||||
* @param interaction |
||||
* @returns |
||||
*/ |
||||
export async function promQailSuggest( |
||||
dispatch: React.Dispatch<AnyAction>, |
||||
idx: number, |
||||
query: PromVisualQuery, |
||||
labelNames: string[], |
||||
datasource: PrometheusDatasource, |
||||
interaction?: Interaction |
||||
) { |
||||
const interactionToUpdate = interaction ? interaction : createInteraction(SuggestionType.Historical); |
||||
|
||||
// Decide metric type
|
||||
let metricType = ''; |
||||
// Makes sure we loaded the metadata for metrics. Usually this is done in the start() method of the
|
||||
// provider but we only need the metadata here.
|
||||
if (!datasource.languageProvider.metricsMetadata) { |
||||
await datasource.languageProvider.loadMetricsMetadata(); |
||||
} |
||||
if (datasource.languageProvider.metricsMetadata) { |
||||
// `datasource.languageProvider.metricsMetadata` is a list of metric family names (with desired type)
|
||||
// from the datasource metadata endoint, but unfortunately the expanded _sum, _count, _bucket raw
|
||||
// metric names are also generated and populating this list (all of type counter). We want the metric
|
||||
// family type, so need to guess the metric family name from the chosen metric name, and test if that
|
||||
// metric family has a type specified.
|
||||
const metricFamilyGuess = guessMetricFamily(query.metric); |
||||
metricType = getMetadataType(metricFamilyGuess, datasource.languageProvider.metricsMetadata) ?? ''; |
||||
} |
||||
if (metricType === '') { |
||||
// fallback to heuristic guess
|
||||
metricType = guessMetricType(query.metric, datasource.languageProvider.metrics); |
||||
} |
||||
|
||||
if (interactionToUpdate.suggestionType === SuggestionType.Historical) { |
||||
return new Promise<void>((resolve) => { |
||||
return setTimeout(() => { |
||||
const suggestions = getTemplateSuggestions( |
||||
query.metric, |
||||
metricType, |
||||
promQueryModeller.renderLabels(query.labels) |
||||
); |
||||
|
||||
const payload = { |
||||
idx, |
||||
interaction: { ...interactionToUpdate, suggestions: suggestions, isLoading: false }, |
||||
}; |
||||
dispatch(updateInteraction(payload)); |
||||
resolve(); |
||||
}, 1000); |
||||
}); |
||||
} else { |
||||
type SuggestionBody = { |
||||
metric: string; |
||||
labels: string; |
||||
prompt?: string; |
||||
}; |
||||
|
||||
// get all available labels
|
||||
const metricLabels = await datasource.languageProvider.fetchLabelsWithMatch(query.metric); |
||||
|
||||
let feedTheAI: SuggestionBody = { |
||||
metric: query.metric, |
||||
// drop __name__ label because it's not useful
|
||||
labels: Object.keys(metricLabels) |
||||
.filter((label) => label !== '__name__') |
||||
.join(','), |
||||
}; |
||||
|
||||
// @ts-ignore llms types issue
|
||||
let results: Array<llms.vector.SearchResult<TemplateSearchResult>> = []; |
||||
if (interaction?.suggestionType === SuggestionType.AI) { |
||||
feedTheAI = { ...feedTheAI, prompt: interaction.prompt }; |
||||
|
||||
// @ts-ignore llms types issue
|
||||
results = await llms.vector.search<TemplateSearchResult>({ |
||||
query: interaction.prompt, |
||||
collection: promQLTemplatesCollection, |
||||
topK: 5, |
||||
filter: { |
||||
$or: generateMetricTypeFilters(metricType.split(',').concat(['*'])), |
||||
}, |
||||
}); |
||||
reportInteraction('grafana_prometheus_promqail_vector_results', { |
||||
metric: query.metric, |
||||
prompt: interaction.prompt, |
||||
results: results, |
||||
}); |
||||
// TODO: handle errors from vector search
|
||||
} |
||||
|
||||
const resultsString = results |
||||
.map((r) => { |
||||
return `${r.payload.promql} | ${r.payload.description} (score=${(r.score * 100).toFixed(1)})`; |
||||
}) |
||||
.join('\n'); |
||||
|
||||
const promptMessages = getSuggestMessages({ |
||||
promql: query.metric, |
||||
question: interaction ? interaction.prompt : '', |
||||
metricType: metricType, |
||||
labels: labelNames.join(', '), |
||||
templates: resultsString, |
||||
}); |
||||
|
||||
return openai |
||||
.streamChatCompletions({ |
||||
model: OPENAI_MODEL_NAME, |
||||
messages: promptMessages, |
||||
temperature: 0.5, |
||||
}) |
||||
.pipe(openai.accumulateContent()) |
||||
.subscribe((response) => { |
||||
const payload = { |
||||
idx, |
||||
interaction: { |
||||
...interactionToUpdate, |
||||
suggestions: [ |
||||
{ |
||||
query: response, |
||||
explanation: '', |
||||
}, |
||||
], |
||||
isLoading: false, |
||||
}, |
||||
}; |
||||
dispatch(updateInteraction(payload)); |
||||
}); |
||||
} |
||||
} |
@ -1,44 +0,0 @@ |
||||
// Core Grafana history https://github.com/grafana/grafana/blob/v11.0.0-preview/public/app/plugins/datasource/prometheus/querybuilder/components/promQail/state/state.ts
|
||||
import { PromVisualQuery } from '../../../types'; |
||||
import { Interaction, SuggestionType } from '../types'; |
||||
|
||||
/** |
||||
* Initial state for PromQAIL |
||||
* @param query the prometheus query with metric and possible labels |
||||
*/ |
||||
export function initialState(query?: PromVisualQuery, showStartingMessage?: boolean): PromQailState { |
||||
return { |
||||
query: query ?? { |
||||
metric: '', |
||||
labels: [], |
||||
operations: [], |
||||
}, |
||||
showExplainer: false, |
||||
showStartingMessage: showStartingMessage ?? true, |
||||
indicateCheckbox: false, |
||||
askForQueryHelp: false, |
||||
interactions: [], |
||||
}; |
||||
} |
||||
|
||||
/**
 * The PromQAIL state object
 */
export interface PromQailState {
  // The Prometheus visual query (metric, labels, operations) suggestions are generated for.
  query: PromVisualQuery;
  // Whether the explainer panel is shown.
  showExplainer: boolean;
  // Whether the starting/security message is shown (defaults to true in initialState).
  showStartingMessage: boolean;
  // NOTE(review): presumably highlights the "don't show this message again"
  // checkbox in the drawer UI — confirm against the component that reads it.
  indicateCheckbox: boolean;
  // Whether the user has asked for query help.
  askForQueryHelp: boolean;
  // Conversation history: each prompt/suggestion exchange is one Interaction.
  interactions: Interaction[];
}
||||
|
||||
export function createInteraction(suggestionType: SuggestionType, isLoading?: boolean): Interaction { |
||||
return { |
||||
suggestionType: suggestionType, |
||||
prompt: '', |
||||
suggestions: [], |
||||
isLoading: isLoading ?? false, |
||||
explanationIsLoading: false, |
||||
}; |
||||
} |
@ -1,342 +0,0 @@ |
||||
// Core Grafana history https://github.com/grafana/grafana/blob/v11.0.0-preview/public/app/plugins/datasource/prometheus/querybuilder/components/promQail/state/templates.ts
|
||||
import { QuerySuggestion } from '../types'; |
||||
|
||||
/**
 * A PromQL query template paired with a human-readable description.
 * The placeholder metric name `metric_a` and the empty selector `{}` are
 * substituted with a concrete metric name and label selector by processTemplate.
 */
interface TemplateData {
  // PromQL expression using the placeholder metric name `metric_a`.
  template: string;
  // Plain-English explanation of the template (also uses `metric_a`).
  description: string;
}
||||
|
||||
export const generalTemplates: TemplateData[] = [ |
||||
{ |
||||
template: 'metric_a{}', |
||||
description: 'Get the data for "metric_a"', |
||||
}, |
||||
{ |
||||
template: 'avg by(c) (metric_a{})', |
||||
description: 'Average of all series in "metric_a" grouped by the label "c"', |
||||
}, |
||||
{ |
||||
template: 'count by(d) (metric_a{})', |
||||
description: 'Number of series in the metric "metric_a" grouped by the label "d"', |
||||
}, |
||||
{ |
||||
template: 'sum by(g) (sum_over_time(metric_a{}[1h]))', |
||||
description: |
||||
'For each series in the metric "metric_a", sum all values over 1 hour, then group those series by label "g" and sum.', |
||||
}, |
||||
{ |
||||
template: 'count(metric_a{})', |
||||
description: 'Count of series in the metric "metric_a"', |
||||
}, |
||||
{ |
||||
template: '(metric_a{})', |
||||
description: 'Get the data for "metric_a"', |
||||
}, |
||||
{ |
||||
template: 'count_over_time(metric_a{}[1h])', |
||||
description: 'Number of series of metric_a in a 1 hour interval', |
||||
}, |
||||
{ |
||||
template: 'changes(metric_a{}[1m])', |
||||
description: 'Number of times the values of each series in metric_a have changed in 1 minute periods', |
||||
}, |
||||
{ |
||||
template: 'count(count by(g) (metric_a{}))', |
||||
description: 'Total number of series in metric_a', |
||||
}, |
||||
{ |
||||
template: 'last_over_time(metric_a{}[1h])', |
||||
description: 'For each series in metric_a, get the last value in the 1 hour period.', |
||||
}, |
||||
{ |
||||
template: 'sum by(g) (count_over_time(metric_a{}[1h]))', |
||||
description: 'Grouped sum over the label "g" of the number of series of metric_a in a 1 hour period', |
||||
}, |
||||
{ |
||||
template: 'count(metric_a{} == 99)', |
||||
description: 'Number of series of metric_a that have value 99', |
||||
}, |
||||
{ |
||||
template: 'min(metric_a{})', |
||||
description: 'At each timestamp, find the minimum of all series of the metric "metric_a"', |
||||
}, |
||||
{ |
||||
template: 'metric_a{} != 99', |
||||
description: 'Series of metric_a which do not have the value 99', |
||||
}, |
||||
{ |
||||
template: 'metric_a{} - 99', |
||||
description: 'metric_a minus 99', |
||||
}, |
||||
{ |
||||
template: 'quantile_over_time(0.99,metric_a{}[1h])', |
||||
description: 'The 99th quantile of values of metric_a in 1 hour', |
||||
}, |
||||
{ |
||||
template: 'count_values("aaaa",metric_a{})', |
||||
description: 'Count number of label values for a label named "aaaa"', |
||||
}, |
||||
{ |
||||
template: 'quantile by(l) (1,metric_a)', |
||||
description: 'Quantile of series in the metric "metric_a" grouped by the label "l"', |
||||
}, |
||||
]; |
||||
|
||||
export const counterTemplates: TemplateData[] = [ |
||||
{ |
||||
template: 'sum by(d) (rate(metric_a{}[1h]))', |
||||
description: |
||||
'Sum of the rate of increase or decrease of the metric "metric_a" per 1 hour period, grouped by the label "d"', |
||||
}, |
||||
{ |
||||
template: 'rate(metric_a{}[1m])', |
||||
description: 'Rate of change of the metric "metric_a" over 1 minute', |
||||
}, |
||||
{ |
||||
template: 'sum by(a) (increase(metric_a{}[5m]))', |
||||
description: |
||||
'Taking the metric "metric_a" find the increase in 5 minute periods of each series and aggregate sum over the label "a"', |
||||
}, |
||||
{ |
||||
template: 'sum(rate(metric_a{}[1m]))', |
||||
description: 'Total rate of change of all series of metric "metric_a" in 1 minute intervals', |
||||
}, |
||||
{ |
||||
template: 'sum(increase(metric_a{}[10m]))', |
||||
description: 'Total increase for each series of metric "metric_a" in 10 minute intervals', |
||||
}, |
||||
{ |
||||
template: 'increase(metric_a{}[1h])', |
||||
description: 'Increase in all series of "metric_a" in 1 hour period', |
||||
}, |
||||
{ |
||||
template: 'sum by(d) (irate(metric_a{}[1h]))', |
||||
description: 'Sum of detailed rate of change of the metric "metric_a" over 1 hour grouped by label "d"', |
||||
}, |
||||
{ |
||||
template: 'irate(metric_a{}[1h])', |
||||
description: 'Detailed rate of change of the metric "metric_a" over 1 hour', |
||||
}, |
||||
{ |
||||
template: 'avg by(d) (rate(metric_a{}[1h]))', |
||||
description: |
||||
'Taking the rate of change of the metric "metric_a" in a 1 hour period, group by the label "d" and find the average of each group', |
||||
}, |
||||
{ |
||||
template: 'topk(5,sum by(g) (rate(metric_a{}[1h])))', |
||||
description: 'Top 5 of the summed groups "g" of the rate of change of metric_a', |
||||
}, |
||||
{ |
||||
template: 'sum(rate(metric_a{}[1h])) / sum(rate(metric_a{}[1h]))', |
||||
description: 'Relative sums of metric_a with different labels', |
||||
}, |
||||
{ |
||||
template: 'histogram_quantile(99,rate(metric_a{}[1h]))', |
||||
description: '99th percentile of the rate of change of metric_a in 1 hour periods', |
||||
}, |
||||
{ |
||||
template: 'avg(rate(metric_a{}[1m]))', |
||||
description: 'Average of the rate of all series of metric_a in 1 minute periods', |
||||
}, |
||||
{ |
||||
template: 'rate(metric_a{}[5m]) > 99', |
||||
description: 'Show series of metric_a only if their rate over 5 minutes is greater than 99', |
||||
}, |
||||
{ |
||||
template: 'count by(g) (rate(metric_a{}[1h]))', |
||||
description: 'Count of series of metric_a over all labels "g"', |
||||
}, |
||||
]; |
||||
|
||||
export const histogramTemplates: TemplateData[] = [ |
||||
{ |
||||
template: 'histogram_quantile(99,sum by(le) (rate(metric_a{}[1h])))', |
||||
description: |
||||
'Calculate the rate at which the metric "metric_a" is increasing or decreasing, summed over each bucket label "le", and then calculates the 99th percentile of those rates.', |
||||
}, |
||||
{ |
||||
template: 'histogram_quantile(99,sum by(g) (metric_a{}))', |
||||
description: '99th percentile of the sum of metric_a grouped by label "g"', |
||||
}, |
||||
{ |
||||
template: 'histogram_quantile(99,sum by(g) (irate(metric_a{}[1h])))', |
||||
description: '99th percentile of the grouped by "g" sum of the rate of each series in metric_a in an hour', |
||||
}, |
||||
{ |
||||
template: 'histogram_quantile(99,metric_a{})', |
||||
description: '99th percentile of metric_a', |
||||
}, |
||||
]; |
||||
|
||||
export const gaugeTemplates: TemplateData[] = [ |
||||
{ |
||||
template: 'sum by(c) (metric_a{})', |
||||
description: 'Sum the metric "metric_a" by each value in label "c"', |
||||
}, |
||||
{ |
||||
template: 'sum(metric_a{})', |
||||
description: 'Total sum of all the series of the metric named "metric_a"', |
||||
}, |
||||
{ |
||||
template: 'max by(dd) (metric_a{})', |
||||
description: 'Grouping the series the metric "metric_a" by the label "dd", get the maximum value of each group', |
||||
}, |
||||
{ |
||||
template: 'max(metric_a{})', |
||||
description: 'Maximum value of all series of the metric "metric_a" ', |
||||
}, |
||||
{ |
||||
template: 'avg(metric_a{})', |
||||
description: 'Average value of all the series of metric "metric_a"', |
||||
}, |
||||
{ |
||||
template: 'metric_a{} > 99', |
||||
description: 'Show only the series of metric "metric_a" which currently have value greater than 99', |
||||
}, |
||||
{ |
||||
template: 'metric_a{} / 99', |
||||
description: 'Values for "metric_a" all divided by 99', |
||||
}, |
||||
{ |
||||
template: 'metric_a{} == 99', |
||||
description: 'Show series of metric_a that have value 99', |
||||
}, |
||||
{ |
||||
template: 'sum_over_time(metric_a{}[1h])', |
||||
description: 'Sum each series of metric_a over 1 hour', |
||||
}, |
||||
{ |
||||
template: 'avg_over_time(metric_a{}[1h])', |
||||
description: 'Average of each series of metric_a in a 1 hour period', |
||||
}, |
||||
{ |
||||
template: 'sum(sum_over_time(metric_a{}[1h]))', |
||||
description: 'Sum of all values in all series in a 1 hour period', |
||||
}, |
||||
{ |
||||
template: 'delta(metric_a{}[1m])', |
||||
description: 'Span or delta (maximum - minimum) of values of the metric "metric_a" in a 1 minute period. ', |
||||
}, |
||||
{ |
||||
template: 'avg by(g) (avg_over_time(metric_a{}[1h]))', |
||||
description: |
||||
'For 1 hour, take each series and find the average, then group by label "g" and find the average of each group', |
||||
}, |
||||
{ |
||||
template: 'max_over_time(metric_a{}[1h])', |
||||
description: 'Maximum values of each series in metric "metric_a" in a 1 hour period', |
||||
}, |
||||
{ |
||||
template: 'metric_a{} * 99', |
||||
description: 'Values of metric_a multiplied by 99', |
||||
}, |
||||
{ |
||||
template: 'metric_a{} < 99', |
||||
description: 'Series of metric_a that have values less than 99', |
||||
}, |
||||
{ |
||||
template: 'max by() (max_over_time(metric_a{}[1h]))', |
||||
description: 'Find maximum value of all series in 1 hour periods', |
||||
}, |
||||
{ |
||||
template: 'topk(99,metric_a{})', |
||||
description: 'First 5 series of metric_a that have the highest values', |
||||
}, |
||||
{ |
||||
template: 'min by(g) (metric_a{})', |
||||
description: 'Minimum values of the series of metric_a grouped by label "g"', |
||||
}, |
||||
{ |
||||
template: 'topk(10,sum by(g) (metric_a{}))', |
||||
description: "Top 10 of the series of metric_a grouped and summed by the label 'g'", |
||||
}, |
||||
{ |
||||
template: 'avg(avg_over_time(metric_a{}[1h]))', |
||||
description: 'Average of all values inside a 1 hour period', |
||||
}, |
||||
{ |
||||
template: 'quantile by(h) (0.95,metric_a{})', |
||||
description: 'Calculate 95th percentile of metric_a when aggregated by the label "h"', |
||||
}, |
||||
{ |
||||
template: 'avg by(g) (metric_a{} > 99)', |
||||
description: |
||||
'Taking all series of metric_a with value greater than 99, group by label "g" and find the average of each group', |
||||
}, |
||||
{ |
||||
template: 'sum(metric_a{}) / 99', |
||||
description: 'Sum of all series of metric_a divided by 99', |
||||
}, |
||||
{ |
||||
template: 'count(sum by(g) (metric_a{}))', |
||||
description: 'Number of series of metric_a grouped by the label "g"', |
||||
}, |
||||
{ |
||||
template: 'max(max_over_time(metric_a{}[1h]))', |
||||
description: 'Find the max value of all series of metric_a in a 1 hour period', |
||||
}, |
||||
]; |
||||
|
||||
function processTemplate(templateData: TemplateData, metric: string, labels: string): QuerySuggestion { |
||||
return { |
||||
query: templateData.template.replace('metric_a', metric).replace('{}', labels), |
||||
explanation: templateData.description.replace('metric_a', metric), |
||||
}; |
||||
} |
||||
|
||||
export function getTemplateSuggestions(metricName: string, metricType: string, labels: string): QuerySuggestion[] { |
||||
let templateSuggestions: QuerySuggestion[] = []; |
||||
switch (metricType) { |
||||
case 'counter': |
||||
templateSuggestions = templateSuggestions.concat( |
||||
counterTemplates |
||||
.map((t) => processTemplate(t, metricName, labels)) |
||||
.sort(() => Math.random() - 0.5) |
||||
.slice(0, 2) |
||||
); |
||||
templateSuggestions = templateSuggestions.concat( |
||||
generalTemplates |
||||
.map((t) => processTemplate(t, metricName, labels)) |
||||
.sort(() => Math.random() - 0.5) |
||||
.slice(0, 3) |
||||
); |
||||
break; |
||||
case 'gauge': |
||||
templateSuggestions = templateSuggestions.concat( |
||||
gaugeTemplates |
||||
.map((t) => processTemplate(t, metricName, labels)) |
||||
.sort(() => Math.random() - 0.5) |
||||
.slice(0, 2) |
||||
); |
||||
templateSuggestions = templateSuggestions.concat( |
||||
generalTemplates |
||||
.map((t) => processTemplate(t, metricName, labels)) |
||||
.sort(() => Math.random() - 0.5) |
||||
.slice(0, 3) |
||||
); |
||||
break; |
||||
case 'histogram': |
||||
templateSuggestions = templateSuggestions.concat( |
||||
histogramTemplates |
||||
.map((t) => processTemplate(t, metricName, labels)) |
||||
.sort(() => Math.random() - 0.5) |
||||
.slice(0, 2) |
||||
); |
||||
templateSuggestions = templateSuggestions.concat( |
||||
generalTemplates |
||||
.map((t) => processTemplate(t, metricName, labels)) |
||||
.sort(() => Math.random() - 0.5) |
||||
.slice(0, 3) |
||||
); |
||||
break; |
||||
default: |
||||
templateSuggestions = templateSuggestions.concat( |
||||
generalTemplates |
||||
.map((t) => processTemplate(t, metricName, labels)) |
||||
.sort(() => Math.random() - 0.5) |
||||
.slice(0, 5) |
||||
); |
||||
break; |
||||
} |
||||
return templateSuggestions; |
||||
} |
@ -1,18 +0,0 @@ |
||||
// Core Grafana history https://github.com/grafana/grafana/blob/v11.0.0-preview/public/app/plugins/datasource/prometheus/querybuilder/components/promQail/types.ts
|
||||
/** A single PromQL query suggestion presented to the user. */
export type QuerySuggestion = {
  // The suggested PromQL expression.
  query: string;
  // Plain-English explanation of the query (set to '' while an AI response streams in).
  explanation: string;
};

/** How a suggestion set was produced. */
export enum SuggestionType {
  // Template-based suggestions picked from the historical template lists.
  Historical = 'historical',
  // Suggestions produced via vector search + LLM chat completion.
  AI = 'AI',
}

/** One prompt/response exchange in the query-assistant conversation. */
export type Interaction = {
  // Free-text prompt entered by the user ('' for historical suggestions).
  prompt: string;
  // Whether the suggestions came from templates or the AI flow.
  suggestionType: SuggestionType;
  // Suggestions generated for this interaction.
  suggestions: QuerySuggestion[];
  // True while suggestions are being generated for this interaction.
  isLoading: boolean;
  // NOTE(review): presumably true while an explanation request is in flight —
  // not exercised in the visible code, confirm in the explain component.
  explanationIsLoading: boolean;
};
|
|
Loading…
Reference in new issue