GenAI: Fixes multiple calls to settings and health (#87623)

* GenAI: Fixes multiple calls to settings and health

* swap order of tests given new caching

---------

Co-authored-by: nmarrs <nathanielmarrs@gmail.com>
drew08t/canvas-math-behind-scenes-demo
Torkel Ödegaard 1 year ago committed by GitHub
parent ef51a64b57
commit f83366fcdd
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
  1. 16
      public/app/features/dashboard/components/GenAI/utils.test.ts
  2. 21
      public/app/features/dashboard/components/GenAI/utils.ts

@@ -98,22 +98,22 @@ describe('getDashboardChanges', () => {
});
describe('isLLMPluginEnabled', () => {
it('should return true if LLM plugin is enabled', async () => {
// Mock llms.openai.health to return true
jest.mocked(llms.openai.health).mockResolvedValue({ ok: true, configured: false });
it('should return false if LLM plugin is not enabled', async () => {
// Mock llms.openai.health to return false
jest.mocked(llms.openai.health).mockResolvedValue({ ok: false, configured: false });
const enabled = await isLLMPluginEnabled();
expect(enabled).toBe(true);
expect(enabled).toBe(false);
});
it('should return false if LLM plugin is not enabled', async () => {
// Mock llms.openai.health to return false
jest.mocked(llms.openai.health).mockResolvedValue({ ok: false, configured: false });
it('should return true if LLM plugin is enabled', async () => {
// Mock llms.openai.health to return true
jest.mocked(llms.openai.health).mockResolvedValue({ ok: true, configured: false });
const enabled = await isLLMPluginEnabled();
expect(enabled).toBe(false);
expect(enabled).toBe(true);
});
});

@@ -59,18 +59,35 @@ export function getDashboardChanges(dashboard: DashboardModel): {
};
}
// Shared healthcheck promise so avoid multiple calls llm app settings and health check APIs
let llmHealthCheck: Promise<boolean> | undefined;
/**
* Check if the LLM plugin is enabled.
* @returns true if the LLM plugin is enabled.
*/
export async function isLLMPluginEnabled() {
export async function isLLMPluginEnabled(): Promise<boolean> {
if (!config.apps['grafana-llm-app']) {
return false;
}
if (llmHealthCheck) {
return llmHealthCheck;
}
// Check if the LLM plugin is enabled.
// If not, we won't be able to make requests, so return early.
return llms.openai.health().then((response) => response.ok);
llmHealthCheck = new Promise((resolve) => {
llms.openai.health().then((response) => {
if (!response.ok) {
// Health check fail clear cached promise so we can try again later
llmHealthCheck = undefined;
}
resolve(response.ok);
});
});
return llmHealthCheck;
}
/**

Loading…
Cancel
Save