import uuid
from ielts_be.configs.constants import GPTModels, TemperatureSettings
from ielts_be.helpers import ExercisesHelper
from ielts_be.services import ILLMService


class WriteBlanks:
    """Builds "write the blanks" short-answer exercises from a passage using an LLM."""

    def __init__(self, llm: ILLMService):
        """Store the LLM service used to generate the questions.

        Args:
            llm: Service exposing the async ``prediction`` interface.
        """
        self._llm = llm

    async def gen_write_blanks_exercise(self, text: str, quantity: int, start_id: int, difficulty: str, max_words: int = 3) -> dict:
        """Generate a write-blanks exercise about *text* via the LLM.

        Args:
            text: Source passage the questions are about.
            quantity: Number of questions requested; the LLM response is also
                truncated to this many in case it returns extras.
            start_id: First numeric id passed to the ExercisesHelper builders
                (presumably numbers the blanks sequentially — confirm in helper).
            difficulty: CEFR difficulty level (e.g. "B2") interpolated into the prompt.
            max_words: Maximum number of words allowed per answer.

        Returns:
            Exercise dict with ``id``, ``maxWords``, ``prompt``, ``solutions``,
            ``text`` and ``type`` ("writeBlanks") keys.
        """
        messages = [
            {
                "role": "system",
                # Pins the model to a strict JSON output schema so the response
                # can be consumed as response["questions"] below.
                "content": (
                    'You are a helpful assistant designed to output JSON on this format: '
                    '{"questions": [{"question": question, "possible_answers": ["answer_1", "answer_2"]}]}'
                )
            },
            {
                "role": "user",
                # Fixed: str(quantity) was redundant inside the f-string —
                # {quantity} produces the identical text.
                "content": (
                    f'Generate {quantity} {difficulty} CEFR level difficulty short answer questions, and the '
                    f'possible answers, must have maximum {max_words} words per answer, about this text:\n"{text}"'
                )
            }
        ]

        response = await self._llm.prediction(
            GPTModels.GPT_4_O, messages, ["questions"], TemperatureSettings.GEN_QUESTION_TEMPERATURE
        )
        # The model may over-deliver; keep only the first `quantity` questions.
        questions = response["questions"][:quantity]

        return {
            "id": str(uuid.uuid4()),
            "maxWords": max_words,
            "prompt": f"Choose no more than {max_words} words and/or a number from the passage for each answer.",
            "solutions": ExercisesHelper.build_write_blanks_solutions(questions, start_id),
            "text": ExercisesHelper.build_write_blanks_text(questions, start_id),
            "type": "writeBlanks"
        }