Set temperature to 0 for promql builder llm features (#77520)

set temperature for openai calls
pull/77763/head
Edward Qian 2 years ago committed by GitHub
parent 08f6abe4ac
commit 9e346616d0
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
  1 changed file with 2 additions and 0 deletions
      public/app/plugins/datasource/prometheus/querybuilder/components/promQail/state/helpers.ts

@@ -111,6 +111,7 @@ export async function promQailExplain(
.streamChatCompletions({
model: OPENAI_MODEL_NAME,
messages: promptMessages,
temperature: 0,
})
.pipe(llms.openai.accumulateContent())
.subscribe((response) => {
@@ -371,6 +372,7 @@ export async function promQailSuggest(
.streamChatCompletions({
model: OPENAI_MODEL_NAME,
messages: promptMessages,
temperature: 0,
})
.pipe(llms.openai.accumulateContent())
.subscribe((response) => {

Loading…
Cancel
Save