import random

from app.configs.constants import GPTModels, TemperatureSettings, EducationalContent
from app.services.abc import ILLMService


class FillBlanks:
    """Builds multiple-choice fill-in-the-blank exercises via an LLM."""

    def __init__(self, llm: ILLMService):
        # LLM backend used to produce the exercise JSON.
        self._llm = llm

    async def gen_fill_blanks(
        self, start_id: int, quantity: int, size: int = 300, topic=None
    ):
        """Generate one multiple-choice fill-in-the-blanks question.

        Args:
            start_id: Id assigned to the first blank; later blanks increment from it.
            quantity: Number of (non-sequential) words to blank out.
            size: Minimum word count of the generated text.
            topic: Subject of the text; a random MTI topic is chosen when falsy.

        Returns:
            The LLM-produced question dict augmented with ``type``, ``variant``
            and ``prompt`` keys.
        """
        if not topic:
            topic = random.choice(EducationalContent.MTI_TOPICS)

        messages = [
            {
                "role": "system",
                "content": f'You are a helpful assistant designed to output JSON on this format: {self._fill_blanks_mc_template()}'
            },
            {
                "role": "user",
                "content": f'Generate a text of at least {size} words about the topic {topic}.'
            },
            {
                "role": "user",
                "content": (
                    f'From the generated text choose exactly {quantity} words (cannot be sequential words) replace '
                    # {{{{id}}}} renders as the literal placeholder "{{id}}",
                    # matching the original prompt text exactly.
                    f'each with {{{{id}}}} (starting from {start_id} and incrementing), then generate a '
                    'JSON object containing: the modified text, a solutions array with each word\'s correct '
                    'letter (A-D), and a words array containing each id with four options where one is '
                    'the original word (matching the solution) and three are plausible but incorrect '
                    'alternatives that maintain grammatical consistency. '
                    # TODO: the model still occasionally repeats words despite this instruction.
                    'You cannot use repeated words!'
                )
            }
        ]

        question = await self._llm.prediction(
            GPTModels.GPT_4_O,
            messages,
            [],
            TemperatureSettings.GEN_QUESTION_TEMPERATURE
        )

        return {
            **question,
            "type": "fillBlanks",
            "variant": "mc",
            "prompt": "Click a blank to select the appropriate word for it.",
        }

    @staticmethod
    def _fill_blanks_mc_template():
        """Return the JSON skeleton the LLM is instructed to fill in."""
        return {
            "text": "",
            "solutions": [
                {
                    "id": "",
                    "solution": ""
                }
            ],
            "words": [
                {
                    "id": "",
                    "options": {
                        "A": "",
                        "B": "",
                        "C": "",
                        "D": ""
                    }
                }
            ]
        }