diff --git a/helper/exercises.py b/helper/exercises.py index 1e6bb65..d362ef9 100644 --- a/helper/exercises.py +++ b/helper/exercises.py @@ -675,15 +675,34 @@ def gen_multiple_choice_level(quantity: int, start_id=1): mc_questions = make_openai_instruct_call(GPT_3_5_TURBO_INSTRUCT, gen_multiple_choice_for_text, token_count, None, GEN_QUESTION_TEMPERATURE) - parse_mc_questions = "Parse the questions into this json format: {\"questions\": [{\"id\": \"9\", \"options\": [{\"id\": \"A\", \"text\": " \ + + split_mc_questions = mc_questions.split('13') + + parse_mc_questions = "Parse the questions into this json format: {\"questions\": [{\"id\": \"9\", \"options\": " \ + "[{\"id\": \"A\", \"text\": " \ "\"And\"}, {\"id\": \"B\", \"text\": \"Cat\"}, {\"id\": \"C\", \"text\": " \ "\"Happy\"}, {\"id\": \"D\", \"text\": \"Jump\"}], " \ "\"prompt\": \"Which of the following is a conjunction?\", " \ - "\"solution\": \"A\", \"variant\": \"text\"}]}. \nThe questions: '" + mc_questions + "'" - token_count = count_tokens(parse_mc_questions, model_name=GPT_3_5_TURBO_INSTRUCT)["n_tokens"] - 300 + "\"solution\": \"A\", \"variant\": \"text\"}]}." \ + " \nThe questions: '" + split_mc_questions[0] + "'" + + token_count = count_tokens(parse_mc_questions, model_name=GPT_3_5_TURBO_INSTRUCT)["n_tokens"] question = make_openai_instruct_call(GPT_3_5_TURBO_INSTRUCT, parse_mc_questions, token_count, ["questions"], GEN_QUESTION_TEMPERATURE) + + parse_mc_questions = "Parse the questions into this json format: {\"questions\": [{\"id\": \"9\", \"options\": " \ + "[{\"id\": \"A\", \"text\": " \ + "\"And\"}, {\"id\": \"B\", \"text\": \"Cat\"}, {\"id\": \"C\", \"text\": " \ + "\"Happy\"}, {\"id\": \"D\", \"text\": \"Jump\"}], " \ + "\"prompt\": \"Which of the following is a conjunction?\", " \ + "\"solution\": \"A\", \"variant\": \"text\"}]}. " \ + "\nThe questions: '" + '13' + split_mc_questions[1] + "'" + token_count = count_tokens(parse_mc_questions, model_name="gpt-3.5-turbo-instruct")["n_tokens"] + question_2 = make_openai_instruct_call(GPT_3_5_TURBO_INSTRUCT, parse_mc_questions, token_count, ["questions"], GEN_QUESTION_TEMPERATURE) + question["questions"].extend(question_2["questions"]) return { "id": str(uuid.uuid4()), "prompt": "Select the appropriate option.",