You cannot select more than 25 topics
Topics must start with a letter or number, can include dashes ('-'), and can be up to 35 characters long.
68 lines
2.7 KiB
68 lines
2.7 KiB
<?php

/* For license terms, see /license.txt */

/**
 * Ajax endpoint for the AiHelper plugin.
 *
 * Answer questions based on existing knowledge: generates quiz questions
 * (in Aiken format) about a requested topic through the AI API configured
 * in the plugin settings.
 */
require_once __DIR__.'/../../../main/inc/global.inc.php';
require_once __DIR__.'/../AiHelperPlugin.php';
require_once __DIR__.'/../src/openai/OpenAi.php';
|
$plugin = AiHelperPlugin::create();

// Map of supported API identifiers (keys) as declared by the plugin.
$apiList = $plugin->getApiList();
// API name selected in the plugin settings.
$apiName = $plugin->get('api_name');

// Reject the request when the configured API is not a supported one.
// array_key_exists() checks the keys directly — clearer and cheaper than
// the original in_array($apiName, array_keys($apiList)).
if (!array_key_exists($apiName, $apiList)) {
    throw new Exception("Ai API is not available for this request.");
}
|
|
|
|
switch ($apiName) {
    case AiHelperPlugin::OPENAI_API:
        // Human-readable type names injected into the prompt template below.
        $questionTypes = [
            'multiple_choice' => 'multiple choice',
            'unique_answer' => 'unique answer',
        ];

        // Read request parameters. The null-coalescing defaults avoid PHP 8
        // "undefined array key" warnings when a parameter is missing; the
        // casts keep the values safely typed before interpolation.
        $nQ = (int) ($_REQUEST['nro_questions'] ?? 0);
        $lang = (string) ($_REQUEST['language'] ?? '');
        $topic = (string) ($_REQUEST['quiz_name'] ?? '');
        // Fall back to multiple choice for unknown/missing question types.
        $questionType = $questionTypes[$_REQUEST['question_type'] ?? ''] ?? $questionTypes['multiple_choice'];

        // Prompt template — placeholders: %d question count, %s type,
        // %s language, %s topic. Must be kept byte-identical: the wording
        // constrains the model to parseable Aiken output.
        $prompt = 'Generate %d "%s" questions in Aiken format in the %s language about "%s", making sure there is a \'ANSWER\' line for each question. \'ANSWER\' lines must only mention the letter of the correct answer, not the full answer text and not a parenthesis. The response line must not be separated from the last answer by a blank line. Each answer starts with an uppercase letter, a dot, one space and the answer text. Include an \'ANSWER_EXPLANATION\' line after the \'ANSWER\' line for each question. The terms between single quotes above must not be translated. There must be a blank line between each question. Show the question directly without any prefix. Each answer must not be quoted.';

        // Credentials come from the plugin settings.
        $apiKey = $plugin->get('api_key');
        $organizationId = $plugin->get('organization_id');

        $ai = new OpenAi($apiKey, $organizationId);

        // Completion parameters: low temperature for predictable formatting,
        // mild presence penalty to discourage repeated questions.
        $temperature = 0.2;
        $model = 'text-davinci-003';
        $maxTokens = 2000;
        $frequencyPenalty = 0;
        $presencePenalty = 0.6;
        $prompt = sprintf($prompt, $nQ, $questionType, $lang, $topic);

        $complete = $ai->completion([
            'model' => $model,
            'prompt' => $prompt,
            'temperature' => $temperature,
            'max_tokens' => $maxTokens,
            'frequency_penalty' => $frequencyPenalty,
            'presence_penalty' => $presencePenalty,
        ]);

        // The OpenAi wrapper returns the raw JSON response body.
        $result = json_decode($complete, true);

        // Returns the text answers generated. Defaults to a failure payload
        // when the API returned no choices (or invalid JSON — json_decode
        // then yields null and empty() stays true).
        $return = ['success' => false, 'text' => ''];
        if (!empty($result['choices'])) {
            $return = [
                'success' => true,
                'text' => trim($result['choices'][0]['text']),
            ];
        }
        echo json_encode($return);
        break;
}
|
|
|