Auto-generate: Hide the button when LLM plugin is not enabled (#75878)

* Auto-generate: Hide the button when LLM plugin is not enabled

* Performance: Reduce time to receive the first character of OpenAI
pull/75933/head
Ivan Ortega Alba 2 years ago committed by GitHub
parent 7875dbc6eb
commit c88d5d7b40
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
  1. 46
      public/app/features/dashboard/components/GenAI/GenAIButton.test.tsx
  2. 35
      public/app/features/dashboard/components/GenAI/GenAIButton.tsx
  3. 19
      public/app/features/dashboard/components/GenAI/hooks.ts
  4. 16
      public/app/features/dashboard/components/GenAI/llms/openai.ts
  5. 7
      public/app/features/dashboard/components/GenAI/llms/types.ts

@ -9,11 +9,7 @@ import { locationService } from '@grafana/runtime';
import { GenAIButton, GenAIButtonProps } from './GenAIButton';
import { useOpenAIStream } from './hooks';
import { Role, isLLMPluginEnabled } from './utils';
jest.mock('./utils', () => ({
isLLMPluginEnabled: jest.fn(),
}));
import { Role } from './utils';
const mockedUseOpenAiStreamState = {
setMessages: jest.fn(),
@ -40,40 +36,28 @@ describe('GenAIButton', () => {
describe('when LLM plugin is not configured', () => {
beforeAll(() => {
jest.mocked(isLLMPluginEnabled).mockResolvedValue(false);
});
it('should render text ', async () => {
const { getByText } = setup();
waitFor(() => expect(getByText('Auto-generate')).toBeInTheDocument());
});
it('should disable the button', async () => {
const { getByRole } = setup();
waitFor(() => expect(getByRole('button')).toBeDisabled());
jest.mocked(useOpenAIStream).mockReturnValue({
error: undefined,
isGenerating: false,
reply: 'Some completed generated text',
setMessages: jest.fn(),
value: {
enabled: false,
stream: new Observable().subscribe(),
},
});
});
it('should display an error message when hovering', async () => {
const { getByRole, getByTestId } = setup();
// Wait for the check to be completed
const button = getByRole('button');
await waitFor(() => expect(button).toBeDisabled());
await userEvent.hover(button);
const tooltip = await waitFor(() => getByTestId(selectors.components.Tooltip.container));
expect(tooltip).toBeVisible();
it('should not render anything', async () => {
setup();
// The tooltip keeps interactive to be able to click the link
await userEvent.hover(tooltip);
expect(tooltip).toBeVisible();
waitFor(async () => expect(await screen.findByText('Auto-generate')).not.toBeInTheDocument());
});
});
describe('when LLM plugin is properly configured, so it is enabled', () => {
const setMessagesMock = jest.fn();
beforeEach(() => {
jest.mocked(isLLMPluginEnabled).mockResolvedValue(true);
jest.mocked(useOpenAIStream).mockReturnValue({
error: undefined,
isGenerating: false,
@ -125,7 +109,6 @@ describe('GenAIButton', () => {
describe('when it is generating data', () => {
beforeEach(() => {
jest.mocked(isLLMPluginEnabled).mockResolvedValue(true);
jest.mocked(useOpenAIStream).mockReturnValue({
error: undefined,
isGenerating: true,
@ -171,7 +154,6 @@ describe('GenAIButton', () => {
describe('when there is an error generating data', () => {
const setMessagesMock = jest.fn();
beforeEach(() => {
jest.mocked(isLLMPluginEnabled).mockResolvedValue(true);
jest.mocked(useOpenAIStream).mockReturnValue({
error: new Error('Something went wrong'),
isGenerating: false,

@ -2,7 +2,7 @@ import { css } from '@emotion/css';
import React, { useEffect } from 'react';
import { GrafanaTheme2 } from '@grafana/data';
import { Button, Spinner, useStyles2, Link, Tooltip } from '@grafana/ui';
import { Button, Spinner, useStyles2, Tooltip } from '@grafana/ui';
import { useOpenAIStream } from './hooks';
import { OPEN_AI_MODEL, Message } from './utils';
@ -33,14 +33,8 @@ export const GenAIButton = ({
}: GenAIButtonProps) => {
const styles = useStyles2(getStyles);
// TODO: Implement error handling (use error object from hook)
const { setMessages, reply, isGenerating, value, error } = useOpenAIStream(OPEN_AI_MODEL, temperature);
const onClick = (e: React.MouseEvent<HTMLButtonElement>) => {
onClickProp?.(e);
setMessages(messages);
};
useEffect(() => {
// Todo: Consider other options for `"` sanitation
if (isGenerating && reply) {
@ -48,6 +42,16 @@ export const GenAIButton = ({
}
}, [isGenerating, reply, onGenerate]);
// The button is disabled if the plugin is not installed or enabled
if (!value?.enabled) {
return null;
}
const onClick = (e: React.MouseEvent<HTMLButtonElement>) => {
onClickProp?.(e);
setMessages(messages);
};
const getIcon = () => {
if (error || !value?.enabled) {
return 'exclamation-circle';
@ -58,21 +62,6 @@ export const GenAIButton = ({
return 'ai';
};
const getTooltipContent = () => {
if (error) {
return `Unexpected error: ${error.message}`;
}
if (!value?.enabled) {
return (
<span>
The LLM plugin is not correctly configured. See your <Link href={`/plugins/grafana-llm-app`}>settings</Link>{' '}
and enable your plugin.
</span>
);
}
return '';
};
const getText = () => {
if (error) {
return 'Retry';
@ -84,7 +73,7 @@ export const GenAIButton = ({
return (
<div className={styles.wrapper}>
{isGenerating && <Spinner size={14} />}
<Tooltip show={value?.enabled && !error ? false : undefined} interactive content={getTooltipContent()}>
<Tooltip show={error ? undefined : false} interactive content={`OpenAI error: ${error?.message}`}>
<Button
icon={getIcon()}
onClick={onClick}

@ -40,14 +40,13 @@ export function useOpenAIStream(
const [error, setError] = useState<Error>();
const { error: notifyError } = useAppNotification();
const { error: enabledError, value: enabled } = useAsync(
async () => await isLLMPluginEnabled(),
[isLLMPluginEnabled]
);
const { error: asyncError, value } = useAsync(async () => {
// Check if the LLM plugin is enabled and configured.
// If not, we won't be able to make requests, so return early.
const enabled = await isLLMPluginEnabled();
if (!enabled) {
return { enabled };
}
if (messages.length === 0) {
if (!enabled || !messages.length) {
return { enabled };
}
@ -89,10 +88,10 @@ export function useOpenAIStream(
},
}),
};
}, [messages]);
}, [messages, enabled]);
if (asyncError) {
setError(asyncError);
if (asyncError || enabledError) {
setError(asyncError || enabledError);
}
return {

@ -20,7 +20,7 @@ import {
import { getBackendSrv, getGrafanaLiveSrv, logDebug } from '@grafana/runtime';
import { LLM_PLUGIN_ID, LLM_PLUGIN_ROUTE, setLLMPluginVersion } from './constants';
import { LLMAppHealthCheck } from './types';
import { LLMAppSettings } from './types';
const OPENAI_CHAT_COMPLETIONS_PATH = 'openai/v1/chat/completions';
@ -366,13 +366,13 @@ let loggedWarning = false;
/** Check if the OpenAI API is enabled via the LLM plugin. */
export const enabled = async () => {
// Run a health check to see if the plugin is installed.
let response: LLMAppHealthCheck;
try {
response = await getBackendSrv().get(`${LLM_PLUGIN_ROUTE}/health`, undefined, undefined, {
const settings: LLMAppSettings = await getBackendSrv().get(`${LLM_PLUGIN_ROUTE}/settings`, undefined, undefined, {
showSuccessAlert: false,
showErrorAlert: false,
});
setLLMPluginVersion(settings.info.version);
return settings.enabled ?? false;
} catch (e) {
if (!loggedWarning) {
logDebug(String(e));
@ -383,12 +383,4 @@ export const enabled = async () => {
}
return false;
}
const { details } = response;
// Update the version if it's present on the response.
if (details.version !== undefined) {
setLLMPluginVersion(details.version);
}
// If the plugin is installed then check if it is configured.
return details?.openAI ?? false;
};

@ -5,3 +5,10 @@ export type LLMAppHealthCheck = {
version?: string;
};
};
export type LLMAppSettings = {
enabled: boolean;
info: {
version: string;
};
};

Loading…
Cancel
Save