From 9e346616d0ba72bd884bca1e3027c4f5d2089b23 Mon Sep 17 00:00:00 2001
From: Edward Qian
Date: Tue, 7 Nov 2023 22:27:05 -0500
Subject: [PATCH] Set temperature to 0 for promql builder llm features (#77520)

set temperature for openai calls
---
 .../querybuilder/components/promQail/state/helpers.ts | 2 ++
 1 file changed, 2 insertions(+)

diff --git a/public/app/plugins/datasource/prometheus/querybuilder/components/promQail/state/helpers.ts b/public/app/plugins/datasource/prometheus/querybuilder/components/promQail/state/helpers.ts
index e317aa350f3..86901267052 100644
--- a/public/app/plugins/datasource/prometheus/querybuilder/components/promQail/state/helpers.ts
+++ b/public/app/plugins/datasource/prometheus/querybuilder/components/promQail/state/helpers.ts
@@ -111,6 +111,7 @@ export async function promQailExplain(
     .streamChatCompletions({
       model: OPENAI_MODEL_NAME,
       messages: promptMessages,
+      temperature: 0,
     })
     .pipe(llms.openai.accumulateContent())
     .subscribe((response) => {
@@ -371,6 +372,7 @@ export async function promQailSuggest(
     .streamChatCompletions({
       model: OPENAI_MODEL_NAME,
       messages: promptMessages,
+      temperature: 0,
     })
     .pipe(llms.openai.accumulateContent())
     .subscribe((response) => {
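
For reference, below is a minimal sketch of what a patched call site looks like after this change. Only the streamChatCompletions / accumulateContent chain and the added temperature: 0 option come from the diff above; the import path, the OPENAI_MODEL_NAME value, and the example prompt messages are illustrative assumptions, not copied from helpers.ts.

// Sketch only, assuming the llms.openai helpers exported from
// @grafana/experimental at the time of this patch.
import { llms } from '@grafana/experimental';

// Hypothetical placeholder; the real constant is defined elsewhere in the
// promQail code.
const OPENAI_MODEL_NAME = 'gpt-3.5-turbo';

// Hypothetical prompt; promQailExplain/promQailSuggest build their own.
const promptMessages = [
  { role: 'system' as const, content: 'You explain PromQL queries.' },
  { role: 'user' as const, content: 'Explain: sum(rate(http_requests_total[5m]))' },
];

llms.openai
  .streamChatCompletions({
    model: OPENAI_MODEL_NAME,
    messages: promptMessages,
    // temperature: 0 makes sampling effectively greedy, so the explain and
    // suggest features return stable answers for the same prompt.
    temperature: 0,
  })
  .pipe(llms.openai.accumulateContent())
  .subscribe((reply) => {
    // accumulateContent() folds the streamed chunks together, so each
    // emission here is the full text generated so far.
    console.log(reply);
  });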