ENCOA-256: Further refinements to the level prompt; added multiple-choice exercises to reading

This commit is contained in:
Carlos-Mesquita
2024-12-04 04:18:23 +00:00
parent 4e05c4d913
commit 68cab80851
7 changed files with 48 additions and 22 deletions

View File

@@ -1,5 +1,7 @@
from .true_false import TrueFalse
from .multiple_choice import MultipleChoice

# Public API of the exercises package: one generator class per exercise type.
__all__ = [
    "TrueFalse",
    "MultipleChoice",
]

View File

@@ -0,0 +1,46 @@
import uuid
from app.configs.constants import GPTModels, TemperatureSettings
from app.helpers import ExercisesHelper
from app.services.abc import ILLMService
class MultipleChoice:
    """Generates multiple-choice reading exercises from a source text via an LLM."""

    def __init__(self, llm: ILLMService):
        # LLM backend used to produce the questions.
        self._llm = llm

    async def gen_multiple_choice(
        self, text: str, quantity: int, start_id: int, difficulty: str, n_options: int = 4
    ):
        """Build a multiple-choice exercise payload for *text*.

        Args:
            text: Source passage the questions are generated from.
            quantity: Number of questions to request.
            start_id: First question id; ids are renumbered sequentially from here.
            difficulty: Difficulty label injected into the user prompt.
            n_options: Options per question (default 4).

        Returns:
            A dict with a fresh UUID ``id``, a fixed ``prompt``, the renumbered
            ``questions`` list, and ``type`` set to ``"multipleChoice"``.
        """
        # System message pins the exact JSON schema via a worked example.
        system_message = {
            "role": "system",
            "content": (
                'You are a helpful assistant designed to output JSON on this format: '
                '{"questions": [{"id": "9", "options": [{"id": "A", "text": "Economic benefits"}, {"id": "B", "text": '
                '"Government regulations"}, {"id": "C", "text": "Concerns about climate change"}, {"id": "D", "text": '
                '"Technological advancement"}], "prompt": "What is the main reason for the shift towards renewable '
                'energy sources?", "solution": "C", "variant": "text"}]}'),
        }
        user_message = {
            "role": "user",
            "content": (
                f'Generate {quantity} {difficulty} difficulty multiple choice questions of {n_options} '
                f'options for this text:\n"{text}"'),
        }

        generated = await self._llm.prediction(
            GPTModels.GPT_4_O,
            [system_message, user_message],
            ["questions"],
            TemperatureSettings.GEN_QUESTION_TEMPERATURE,
        )

        # Renumber the LLM-assigned ids so they continue from start_id.
        renumbered = ExercisesHelper.fix_exercise_ids(generated, start_id)
        return {
            "id": str(uuid.uuid4()),
            "prompt": "Select the appropriate option.",
            "questions": renumbered["questions"],
            "type": "multipleChoice",
        }