Merged in feature/ExamGenRework (pull request #129)

Feature/ExamGenRework
This commit is contained in:
Tiago Ribeiro
2024-12-30 19:05:18 +00:00
22 changed files with 796 additions and 122 deletions

View File

@@ -30,7 +30,6 @@ const ListeningComponents: React.FC<Props> = ({ currentSection, localSettings, u
const {
focusedSection,
difficulty,
sections
} = useExamEditorStore(state => state.modules[currentModule]);
const [originalAudioUrl, setOriginalAudioUrl] = useState<string | undefined>();

View File

@@ -27,6 +27,7 @@ const ListeningSettings: React.FC = () => {
sections,
minTimer,
isPrivate,
instructionsState
} = useExamEditorStore(state => state.modules[currentModule]);
const {
@@ -71,10 +72,33 @@ const ListeningSettings: React.FC = () => {
try {
const sectionsWithAudio = sections.filter(s => (s.state as ListeningPart).audio?.source);
if (instructionsState.chosenOption.value === "Custom" && !instructionsState.currentInstructionsURL.startsWith("blob:")) {
toast.error("Generate the custom instructions audio first!");
return;
}
if (sectionsWithAudio.length > 0) {
let instructionsURL = instructionsState.currentInstructionsURL;
if (instructionsState.chosenOption.value === "Custom") {
const instructionsFormData = new FormData();
const instructionsResponse = await fetch(instructionsState.currentInstructionsURL);
const instructionsBlob = await instructionsResponse.blob();
instructionsFormData.append('file', instructionsBlob, 'audio.mp3');
const instructionsUploadResponse = await axios.post('/api/storage', instructionsFormData, {
params: {
directory: 'listening_instructions'
},
headers: {
'Content-Type': 'multipart/form-data'
}
});
instructionsURL = instructionsUploadResponse.data.urls[0];
}
const formData = new FormData();
const sectionMap = new Map<number, string>();
await Promise.all(
sectionsWithAudio.map(async (section) => {
const listeningPart = section.state as ListeningPart;
@@ -120,6 +144,7 @@ const ListeningSettings: React.FC = () => {
variant: sections.length === 4 ? "full" : "partial",
difficulty,
private: isPrivate,
instructions: instructionsURL
};
const result = await axios.post('/api/exam/listening', exam);
@@ -140,6 +165,11 @@ const ListeningSettings: React.FC = () => {
const preview = () => {
if (instructionsState.chosenOption.value === "Custom" && !instructionsState.currentInstructionsURL.startsWith("blob:")) {
toast.error("Generate the custom instructions audio first!");
return;
}
setExam({
parts: sections.map((s) => {
const exercise = s.state as ListeningPart;
@@ -156,6 +186,7 @@ const ListeningSettings: React.FC = () => {
variant: sections.length === 4 ? "full" : "partial",
difficulty,
private: isPrivate,
instructions: instructionsState.currentInstructionsURL
} as ListeningExam);
setExerciseIndex(0);
setQuestionIndex(0);

View File

@@ -0,0 +1,297 @@
import React, { useCallback, useEffect, useMemo, useRef, useState } from "react";
import Button from "@/components/Low/Button";
import Modal from "@/components/Modal";
import useExamEditorStore from "@/stores/examEditor";
import { PRESETS, isValidPresetID } from "./presets";
import AudioPlayer from "@/components/Low/AudioPlayer";
import Select from "@/components/Low/Select";
import AutoExpandingTextArea from "@/components/Low/AutoExpandingTextarea";
import { ListeningInstructionsState } from "@/stores/examEditor/types";
import { debounce } from "lodash";
import axios from "axios";
import { toast } from "react-toastify";
import { playSound } from "@/utils/sound";
import { BsArrowRepeat } from "react-icons/bs";
import { GiBrain } from "react-icons/gi";
// Modal UI for configuring the audio instructions of a listening exam.
// The author either keeps "Automatic" instructions (a pregenerated preset
// matching the currently selected section IDs) or writes "Custom" text and
// generates an MP3 for it via the backend TTS endpoint. Edits are kept in
// local state for responsiveness and flushed to the global exam-editor
// store with a 1s debounce.
const ListeningInstructions: React.FC = () => {
  const { dispatch } = useExamEditorStore();
  // True while the TTS request for custom instructions is in flight.
  const [loading, setLoading] = useState(false);
  // Store-held instructions state and section list for the listening module.
  const { instructionsState: globalInstructions, sections } = useExamEditorStore(s => s.modules["listening"]);
  // Local working copy of the store's instructions state (fast typing path).
  const [localInstructions, setLocalInstructions] = useState<ListeningInstructionsState>(globalInstructions);
  // Updates accumulated between debounce flushes to the store.
  const pendingUpdatesRef = useRef<Partial<ListeningInstructionsState>>({});
  // Keep the local copy in sync when the store changes from elsewhere.
  useEffect(() => {
    if (globalInstructions) {
      setLocalInstructions(globalInstructions);
    }
  }, [globalInstructions]);
  // Debounced flush of accumulated updates into the global store.
  // NOTE(review): the debounced closure spreads the `globalInstructions`
  // snapshot captured when the memo last ran; if other fields changed in the
  // store meanwhile, a flush may overwrite them with stale values — confirm
  // the UPDATE_MODULE reducer merges instructionsState rather than replacing it.
  const debouncedUpdateGlobal = useMemo(() => {
    return debounce(() => {
      if (Object.keys(pendingUpdatesRef.current).length > 0) {
        dispatch({
          type: "UPDATE_MODULE",
          payload: {
            module: "listening",
            updates: {
              instructionsState: {
                ...globalInstructions,
                ...pendingUpdatesRef.current
              }
            }
          }
        });
        pendingUpdatesRef.current = {};
      }
    }, 1000);
    // eslint-disable-next-line react-hooks/exhaustive-deps
  }, [dispatch, globalInstructions]);
  // Apply updates to the local copy and either schedule a debounced store
  // flush (schedule=true, used while typing) or dispatch immediately
  // (schedule=false, used for discrete actions like option changes).
  const updateInstructionsAndSchedule = useCallback((
    updates: Partial<ListeningInstructionsState> | ((prev: ListeningInstructionsState) => Partial<ListeningInstructionsState>),
    schedule: boolean = true
  ) => {
    const newUpdates = typeof updates === 'function' ? updates(localInstructions) : updates;
    setLocalInstructions(prev => ({
      ...prev,
      ...newUpdates
    }));
    if (schedule) {
      pendingUpdatesRef.current = {
        ...pendingUpdatesRef.current,
        ...newUpdates
      };
      debouncedUpdateGlobal();
    } else {
      dispatch({
        type: "UPDATE_MODULE",
        payload: {
          module: "listening",
          updates: {
            instructionsState: {
              ...globalInstructions,
              ...newUpdates
            }
          }
        }
      });
    }
    // eslint-disable-next-line react-hooks/exhaustive-deps
  }, [dispatch, debouncedUpdateGlobal]);
  // Open/close the modal; persisted to the store immediately (not debounced).
  const setIsOpen = useCallback((isOpen: boolean) => {
    updateInstructionsAndSchedule({ isInstructionsOpen: isOpen }, false);
  }, [updateInstructionsAndSchedule]);
  // Switch between "Automatic" (preset text/audio for the current section
  // combination) and "Custom" (restore the author's saved custom text; audio
  // URL cleared until regenerated).
  const onOptionChange = useCallback((option: { value: string, label: string }) => {
    const sectionIds = sections.map(s => s.sectionId);
    // Preset key = section IDs sorted ascending, joined with '_'.
    const presetID = [...sectionIds].sort((a, b) => a - b).join('_');
    const preset = isValidPresetID(presetID) ? PRESETS[presetID] : null;
    updateInstructionsAndSchedule(prev => {
      const updates: Partial<ListeningInstructionsState> = {
        chosenOption: option
      };
      if (option.value === "Automatic" && preset) {
        updates.currentInstructions = preset.text;
        updates.currentInstructionsURL = preset.url;
      } else if (option.value === "Custom") {
        updates.currentInstructions = prev.customInstructions || "";
        updates.currentInstructionsURL = "";
      }
      return updates;
    }, false);
  }, [sections, updateInstructionsAndSchedule]);
  // Any edit in the textarea flips the mode to "Custom" and records the text
  // both as the saved custom text and the currently shown text.
  const onCustomInstructionChange = useCallback((text: string) => {
    updateInstructionsAndSchedule({
      chosenOption: { value: 'Custom', label: 'Custom' },
      customInstructions: text,
      currentInstructions: text
    });
  }, [updateInstructionsAndSchedule]);
  // Recompute the matching preset when the number of sections changes; if the
  // author is in "Automatic" mode, also refresh the visible text/audio.
  useEffect(() => {
    const sectionIds = sections.map(s => s.sectionId);
    const presetID = [...sectionIds].sort((a, b) => a - b).join('_');
    if (isValidPresetID(presetID)) {
      const preset = PRESETS[presetID];
      updateInstructionsAndSchedule(prev => {
        const updates: Partial<ListeningInstructionsState> = {
          presetInstructions: preset.text,
          presetInstructionsURL: preset.url,
        };
        if (prev.chosenOption?.value === "Automatic") {
          updates.currentInstructions = preset.text;
          updates.currentInstructionsURL = preset.url;
        }
        return updates;
      }, false);
    }
    // eslint-disable-next-line react-hooks/exhaustive-deps
  }, [sections.length]);
  // Flush any still-pending debounced updates on unmount so edits made just
  // before leaving the editor are not lost.
  useEffect(() => {
    return () => {
      if (Object.keys(pendingUpdatesRef.current).length > 0) {
        dispatch({
          type: "UPDATE_MODULE",
          payload: {
            module: "listening",
            updates: {
              instructionsState: {
                ...globalInstructions,
                ...pendingUpdatesRef.current
              }
            }
          }
        });
      }
    };
    // eslint-disable-next-line react-hooks/exhaustive-deps
  }, [dispatch]);
  const options = [
    { value: 'Automatic', label: 'Automatic' },
    { value: 'Custom', label: 'Custom' }
  ];
  // POST the current text to the TTS endpoint and wrap the returned MP3
  // bytes in a blob: URL for preview/upload. Revokes the previous blob URL
  // first so object URLs are not leaked.
  const generateInstructionsMP3 = useCallback(async () => {
    if (!localInstructions.currentInstructions) {
      toast.error('Please enter instructions text first');
      return;
    }
    setLoading(true);
    try {
      const response = await axios.post(
        '/api/exam/media/instructions',
        {
          text: localInstructions.currentInstructions
        },
        {
          responseType: 'arraybuffer',
          headers: {
            'Accept': 'audio/mpeg'
          }
        }
      );
      if (localInstructions.currentInstructionsURL?.startsWith('blob:')) {
        URL.revokeObjectURL(localInstructions.currentInstructionsURL);
      }
      const blob = new Blob([response.data], { type: 'audio/mpeg' });
      const url = URL.createObjectURL(blob);
      updateInstructionsAndSchedule({
        customInstructionsURL: url,
        currentInstructionsURL: url
      }, false);
      playSound("check");
      toast.success('Audio generated successfully!');
    } catch (error: any) {
      toast.error('Failed to generate audio');
    } finally {
      setLoading(false);
    }
  }, [localInstructions.currentInstructions, localInstructions.currentInstructionsURL, updateInstructionsAndSchedule]);
  // NOTE(review): in the audio-preview condition below, `&&` binds tighter
  // than `||`, so the `!== ''` guard only applies to the Custom branch;
  // "Automatic" with an empty URL still renders the player — confirm intent.
  return (
    <>
      <Modal
        isOpen={localInstructions.isInstructionsOpen}
        onClose={() => setIsOpen(false)}
      >
        <div className="flex flex-col gap-6 p-6 w-full">
          <div className="flex flex-col gap-2">
            <h2 className="text-xl font-semibold text-gray-800">Listening Instructions</h2>
            <p className="text-sm text-gray-500">Choose instruction type or customize your own</p>
          </div>
          <div className="flex flex-col gap-4">
            <div className="flex flex-col gap-1.5">
              <label className="text-sm font-medium text-gray-700">Instruction Type</label>
              <Select
                options={options}
                onChange={(o) => onOptionChange({ value: o!.value || "", label: o!.label })}
                value={localInstructions.chosenOption}
                className="w-full"
              />
            </div>
            <div className="flex flex-col gap-2">
              <div className="flex flex-col flex-grow gap-1.5">
                <label className="text-sm font-medium text-gray-700">Instructions Text</label>
                <div className="flex flex-row gap-2">
                  <div className="flex flex-grow bg-gray-50 rounded-lg p-4 border border-gray-200 items-center">
                    <AutoExpandingTextArea
                      value={localInstructions.currentInstructions || ''}
                      onChange={onCustomInstructionChange}
                      className="bg-transparent resize-none w-full focus:outline-none"
                      placeholder="Enter custom instructions here..."
                    />
                  </div>
                  {localInstructions.chosenOption?.value === 'Custom' && (
                    <div className="flex items-center">
                      <Button
                        onClick={generateInstructionsMP3}
                        disabled={loading}
                        customColor="bg-ielts-listening/70 hover:bg-ielts-listening border-ielts-listening"
                        className="text-white rounded-md"
                      >
                        {loading ? (
                          <div className="flex items-center justify-center">
                            <BsArrowRepeat className="text-white animate-spin" size={25} />
                          </div>
                        ) : (
                          <div className="flex flex-row items-center">
                            <GiBrain className="mr-2" size={24} />
                            <span>Generate</span>
                          </div>
                        )}
                      </Button>
                    </div>
                  )}
                </div>
              </div>
            </div>
            {(localInstructions.chosenOption?.value === 'Automatic' ||
              (localInstructions.chosenOption?.value === 'Custom' && localInstructions.currentInstructionsURL.startsWith("blob:")) && localInstructions.currentInstructionsURL !== '') && (
              <div className="flex flex-col gap-1.5">
                <label className="text-sm font-medium text-gray-700">Audio Preview</label>
                <div className="bg-gray-50 rounded-lg p-4 border border-gray-200">
                  <AudioPlayer
                    src={localInstructions.currentInstructionsURL ?? ''}
                    color="listening"
                  />
                </div>
              </div>
            )}
          </div>
        </div>
      </Modal>
      <Button
        onClick={() => setIsOpen(true)}
        customColor="bg-ielts-listening/70 hover:bg-ielts-listening border-ielts-listening"
        className="text-white self-end"
      >
        Audio Instructions
      </Button>
    </>
  );
};
export default ListeningInstructions;

View File

@@ -0,0 +1,74 @@
// Pregenerated audio instructions for every combination of listening
// sections. Keys are the selected section IDs, sorted ascending and joined
// with '_' (e.g. "1_3_4"); each entry holds the Firebase Storage URL of a
// pre-rendered MP3 and the transcript text it was generated from.
const PRESETS = {
  "1": {
    url: "https://firebasestorage.googleapis.com/v0/b/storied-phalanx-349916.appspot.com/o/listening_instructions%2FpresetInstructions_1.mp3?alt=media&token=abf0dc1a-6d24-4d33-be0e-7e15f4e4bec2",
    text: "You will hear one recording and you will have to answer questions on what you hear. There will be time for you to read the instructions and questions and you will have a chance to check your work. The recording can be played three times. The recording consists of a conversation between two people in an everyday social context. Pay close attention to the audio recording and answer the questions accordingly.",
  },
  "2": {
    url: "https://firebasestorage.googleapis.com/v0/b/storied-phalanx-349916.appspot.com/o/listening_instructions%2FpresetInstructions_2.mp3?alt=media&token=a635f234-e470-4980-9690-e81544bbbe42",
    text: "You will hear one recording and you will have to answer questions on what you hear. There will be time for you to read the instructions and questions and you will have a chance to check your work. The recording can be played three times. The recording consists of a monologue set in a social context. Pay close attention to the audio recording and answer the questions accordingly.",
  },
  "3": {
    url: "https://firebasestorage.googleapis.com/v0/b/storied-phalanx-349916.appspot.com/o/listening_instructions%2FpresetInstructions_3.mp3?alt=media&token=9659155d-0167-4288-9ba7-4135e135151d",
    text: "You will hear one recording and you will have to answer questions on what you hear. There will be time for you to read the instructions and questions and you will have a chance to check your work. The recording can be played three times. The recording consists of a conversation between up to four individuals in an educational context. Pay close attention to the audio recording and answer the questions accordingly."
  },
  "4": {
    url: "https://firebasestorage.googleapis.com/v0/b/storied-phalanx-349916.appspot.com/o/listening_instructions%2FpresetInstructions_4.mp3?alt=media&token=ed50aae9-2bd7-4d09-a5c9-81cb55ec29fb",
    text: "You will hear one recording and you will have to answer questions on what you hear. There will be time for you to read the instructions and questions and you will have a chance to check your work. The recording can be played three times. The recording consists of a monologue about an academic subject. Pay close attention to the audio recording and answer the questions accordingly."
  },
  "1_2": {
    url: "https://firebasestorage.googleapis.com/v0/b/storied-phalanx-349916.appspot.com/o/listening_instructions%2FpresetInstructions_1_2.mp3?alt=media&token=16b1b6a8-6664-40fa-bb10-f8c89798d43d",
    text: "You will hear two recordings and you will have to answer questions on what you hear. There will be time for you to read the instructions and questions and you will have a chance to check your work. The recordings can be played three times. The module is in 2 parts. In the first part you will hear a conversation between two people in an everyday social context. In the second part you will hear a monologue set in a social context. Pay close attention to the audio recordings and answer the questions accordingly."
  },
  "1_3": {
    url: "https://firebasestorage.googleapis.com/v0/b/storied-phalanx-349916.appspot.com/o/listening_instructions%2FpresetInstructions_1_3.mp3?alt=media&token=3c3264b9-d277-4e43-91f9-6fa77cfd701e",
    text: "You will hear two recordings and you will have to answer questions on what you hear. There will be time for you to read the instructions and questions and you will have a chance to check your work. The recordings can be played three times. The module is in 2 parts. In the first part you will hear a conversation between two people in an everyday social context. In the second part you will hear a conversation between up to four individuals in an educational context. Pay close attention to the audio recordings and answer the questions accordingly.",
  },
  "1_4": {
    url: "https://firebasestorage.googleapis.com/v0/b/storied-phalanx-349916.appspot.com/o/listening_instructions%2FpresetInstructions_1_4.mp3?alt=media&token=350511e6-7010-43f7-a258-662e91ff7399",
    text: "You will hear two recordings and you will have to answer questions on what you hear. There will be time for you to read the instructions and questions and you will have a chance to check your work. The recordings can be played three times. The module is in 2 parts. In the first part you will hear a conversation between two people in an everyday social context. In the second part you will hear a monologue about an academic subject. Pay close attention to the audio recordings and answer the questions accordingly."
  },
  "2_3": {
    url: "https://firebasestorage.googleapis.com/v0/b/storied-phalanx-349916.appspot.com/o/listening_instructions%2FpresetInstructions_2_3.mp3?alt=media&token=fd260687-35e9-4386-8843-b58c2146dd48",
    text: "You will hear two recordings and you will have to answer questions on what you hear. There will be time for you to read the instructions and questions and you will have a chance to check your work. The recordings can be played three times. The module is in 2 parts. In the first part you will hear a monologue set in a social context. In the second part you will hear a conversation between up to four individuals in an educational context. Pay close attention to the audio recordings and answer the questions accordingly."
  },
  "2_4": {
    url: "https://firebasestorage.googleapis.com/v0/b/storied-phalanx-349916.appspot.com/o/listening_instructions%2FpresetInstructions_2_4.mp3?alt=media&token=0d85d499-5461-4d0f-8952-20aba319f783",
    text: "You will hear two recordings and you will have to answer questions on what you hear. There will be time for you to read the instructions and questions and you will have a chance to check your work. The recordings can be played three times. The module is in 2 parts. In the first part you will hear a monologue set in a social context. In the second part you will hear a monologue about an academic subject. Pay close attention to the audio recordings and answer the questions accordingly."
  },
  "3_4": {
    url: "https://firebasestorage.googleapis.com/v0/b/storied-phalanx-349916.appspot.com/o/listening_instructions%2FpresetInstructions_3_4.mp3?alt=media&token=79bdabde-3d05-4234-bec7-5a8b385c2479",
    text: "You will hear two recordings and you will have to answer questions on what you hear. There will be time for you to read the instructions and questions and you will have a chance to check your work. The recordings can be played three times. The module is in 2 parts. In the first part you will hear a conversation between up to four individuals in an educational context. In the second part you will hear a monologue about an academic subject. Pay close attention to the audio recordings and answer the questions accordingly."
  },
  "1_2_3": {
    url: "https://firebasestorage.googleapis.com/v0/b/storied-phalanx-349916.appspot.com/o/listening_instructions%2FpresetInstructions_1_2_3.mp3?alt=media&token=8bdb42dd-e3ed-446b-8760-281768c005e6",
    text: "You will hear a number of different recordings and you will have to answer questions on what you hear. There will be time for you to read the instructions and questions and you will have a chance to check your work. All the recordings can be played three times. The module is in 3 parts. In the first part you will hear a conversation between two people in an everyday social context. In the second part you will hear a monologue set in a social context. In the third part you will hear a conversation between up to four individuals in an educational context. Pay close attention to the audio recordings and answer the questions accordingly."
  },
  "1_2_4": {
    url: "https://firebasestorage.googleapis.com/v0/b/storied-phalanx-349916.appspot.com/o/listening_instructions%2FpresetInstructions_1_2_4.mp3?alt=media&token=5458c3c1-d398-453f-be97-ef1785f9d7e3",
    text: "You will hear a number of different recordings and you will have to answer questions on what you hear. There will be time for you to read the instructions and questions and you will have a chance to check your work. All the recordings can be played three times. The module is in 3 parts. In the first part you will hear a conversation between two people in an everyday social context. In the second part you will hear a monologue set in a social context. In the third part you will hear a monologue about an academic subject. Pay close attention to the audio recordings and answer the questions accordingly."
  },
  "1_3_4": {
    url: "https://firebasestorage.googleapis.com/v0/b/storied-phalanx-349916.appspot.com/o/listening_instructions%2FpresetInstructions_1_3_4.mp3?alt=media&token=0380653e-be5b-4c89-9814-a996ae77a74a",
    text: "You will hear a number of different recordings and you will have to answer questions on what you hear. There will be time for you to read the instructions and questions and you will have a chance to check your work. All the recordings can be played three times. The module is in 3 parts. In the first part you will hear a conversation between two people in an everyday social context. In the second part you will hear a conversation between up to four individuals in an educational context. In the third part you will hear a monologue about an academic subject. Pay close attention to the audio recordings and answer the questions accordingly."
  },
  "2_3_4": {
    url: "https://firebasestorage.googleapis.com/v0/b/storied-phalanx-349916.appspot.com/o/listening_instructions%2FpresetInstructions_2_3_4.mp3?alt=media&token=74bf11d6-e3d4-4711-bdc6-b0adbcaf11d4",
    text: "You will hear a number of different recordings and you will have to answer questions on what you hear. There will be time for you to read the instructions and questions and you will have a chance to check your work. All the recordings can be played three times. The module is in 3 parts. In the first part you will hear a monologue set in a social context. In the second part you will hear a conversation between up to four individuals in an educational context. In the third part you will hear a monologue about an academic subject. Pay close attention to the audio recordings and answer the questions accordingly."
  },
  "1_2_3_4": {
    url: "https://firebasestorage.googleapis.com/v0/b/storied-phalanx-349916.appspot.com/o/listening_instructions%2FpresetInstructions_1_2_3_4.mp3?alt=media&token=7a7ac516-221d-4e79-bd28-5d6bee9d79d8",
    text: "You will hear a number of different recordings and you will have to answer questions on what you hear. There will be time for you to read the instructions and questions and you will have a chance to check your work. All the recordings can be played three times. The module is in 4 parts. In the first part you will hear a conversation between two people in an everyday social context. In the second part you will hear a monologue set in a social context. In the third part you will hear a conversation between up to four individuals in an educational context. In the fourth part you will hear a monologue about an academic subject. Pay close attention to the audio recordings and answer the questions accordingly."
  }
}
// Any key of the PRESETS table ("1", "1_2", ..., "1_2_3_4").
type PresetID = keyof typeof PRESETS;

/**
 * Type guard: narrows an arbitrary string (built elsewhere by joining sorted
 * section IDs with '_') to a known PRESETS key.
 *
 * Uses an own-property check rather than the `in` operator, because `in`
 * also matches inherited Object.prototype members — e.g. `"toString" in
 * PRESETS` is true — which would wrongly validate non-preset strings.
 */
function isValidPresetID(id: string): id is PresetID {
  return Object.prototype.hasOwnProperty.call(PRESETS, id);
}

export {
  PRESETS,
  isValidPresetID
};

View File

@@ -18,7 +18,8 @@ import SpeakingSettings from "./SettingsEditor/speaking";
import ImportOrStartFromScratch from "./ImportExam/ImportOrFromScratch";
import { defaultSectionSettings } from "@/stores/examEditor/defaults";
import Button from "../Low/Button";
import ResetModule from "./ResetModule";
import ResetModule from "./Standalone/ResetModule";
import ListeningInstructions from "./Standalone/ListeningInstructions";
const DIFFICULTIES: Difficulty[] = ["A1", "A2", "B1", "B2", "C1", "C2"];
@@ -200,6 +201,7 @@ const ExamEditor: React.FC<{ levelParts?: number }> = ({ levelParts = 0 }) => {
required
/>
</div>
{currentModule === "listening" && <ListeningInstructions />}
{["reading", "listening", "level"].includes(currentModule) && <Button onClick={() => setIsResetModuleOpen(true)} customColor={`bg-ielts-${currentModule}/70 hover:bg-ielts-${currentModule} border-ielts-${currentModule}`} className={`text-white self-end`}>
Reset Module
</Button>}

View File

@@ -199,7 +199,7 @@ const UserImportSummary: React.FC<Props> = ({ parsedExcel, newUsers, enlistedUse
</>
</Modal>
<Modal isOpen={showEnlistedModal} onClose={() => setShowEnlistedModal(false)}>
<Modal isOpen={showEnlistedModal} onClose={() => setShowEnlistedModal(false)} maxWidth='max-w-[85%]'>
<>
<div className="flex items-center gap-2 mb-6">
<FaUsers className="w-5 h-5 text-blue-500" />

View File

@@ -135,8 +135,8 @@ const Listening: React.FC<ExamProps<ListeningExam>> = ({ exam, showSolutions = f
/>, [partIndex, assignment, timesListened, setShowTextModal, setTimesListened])
const memoizedInstructions = useMemo(() =>
<RenderAudioInstructionsPlayer />
, [])
<RenderAudioInstructionsPlayer instructions={exam.instructions} />
, [exam.instructions])
return (
<>

View File

@@ -2,16 +2,23 @@ import AudioPlayer from "@/components/Low/AudioPlayer";
import { v4 } from "uuid";
const INSTRUCTIONS_AUDIO_SRC =
// Old instructions; they were probably taken from
// a HeyGen video, since I couldn't find the Polly voice
const OLD_INSTRUCTIONS_AUDIO_SRC =
"https://firebasestorage.googleapis.com/v0/b/storied-phalanx-349916.appspot.com/o/generic_listening_intro_v2.mp3?alt=media&token=16769f5f-1e9b-4a72-86a9-45a6f0fa9f82";
const RenderAudioInstructionsPlayer: React.FC = () => (
// New full exam module audio with Polly Matthew voice
const NEW_INSTRUCTIONS_AUDIO_SRC =
"https://firebasestorage.googleapis.com/v0/b/storied-phalanx-349916.appspot.com/o/listening_instructions%2FpresetInstructions_1_2_3_4.mp3?alt=media&token=7a7ac516-221d-4e79-bd28-5d6bee9d79d8";
const RenderAudioInstructionsPlayer: React.FC<{instructions?: string}> = ({instructions}) => (
<div className="flex flex-col gap-8 w-full bg-mti-gray-seasalt rounded-xl py-8 px-16">
<div className="flex flex-col w-full gap-2">
<h4 className="text-xl font-semibold">Please listen to the instructions audio attentively.</h4>
</div>
<div className="rounded-xl flex flex-col gap-4 items-center w-full h-fit">
<AudioPlayer key={v4()} src={INSTRUCTIONS_AUDIO_SRC} color="listening" />
<AudioPlayer key={v4()} src={instructions ?? NEW_INSTRUCTIONS_AUDIO_SRC} color="listening" />
</div>
</div>
);

View File

@@ -1,3 +1,4 @@
import instructions from "@/pages/api/exam/media/instructions";
import { Module } from ".";
export type Exam = ReadingExam | ListeningExam | WritingExam | SpeakingExam | LevelExam;
@@ -69,6 +70,7 @@ export interface LevelPart extends Section {
export interface ListeningExam extends ExamBase {
parts: ListeningPart[];
module: "listening";
instructions?: string;
}
export type Message = { name: string; gender: string; text: string; voice?: string; };

View File

@@ -87,7 +87,15 @@ export default function BatchCreateUser({ user, entities = [], permissions, onFi
const [isExpiryDateEnabled, setIsExpiryDateEnabled] = useState(true);
const [type, setType] = useState<Type>("student");
const [showHelp, setShowHelp] = useState(false);
const [entity, setEntity] = useState((entities || [])[0]?.id || undefined)
const [entity, setEntity] = useState<{id: string | null, label: string | null}| undefined>(() => {
if (!entities?.length) {
return undefined;
}
return {
id: entities[0].id,
label: entities[0].label
};
});
const { openFilePicker, filesContent, clear } = useFilePicker({
accept: ".xlsx",
@@ -291,11 +299,28 @@ export default function BatchCreateUser({ user, entities = [], permissions, onFi
if (!!crossRefEmails) {
const existingEmails = new Set(crossRefEmails.map((x: any) => x.email));
const dupes = infos.filter(info => existingEmails.has(info.email));
const newUsersList = infos.filter(info => !existingEmails.has(info.email));
const newUsersList = infos
.filter(info => !existingEmails.has(info.email))
.map(info => ({
...info,
entityLabels: [entity!.label!]
}));
setNewUsers(newUsersList);
setDuplicatedUsers(dupes);
const {data: emailEntityMap} = await axios.post("/api/users/controller?op=getEntities", {
emails: dupes.map((x) => x.email)
});
const withLabels = dupes.map((u) => ({
...u,
entityLabels: emailEntityMap.find((e: any) => e.email === u.email)?.entityLabels || []
}))
setDuplicatedUsers(withLabels);
} else {
setNewUsers(infos);
const withLabel = infos.map(info => ({
...info,
entityLabels: [entity!.label!]
}));
setNewUsers(withLabel);
}
} catch (error) {
toast.error("Something went wrong, please try again later!");
@@ -305,7 +330,7 @@ export default function BatchCreateUser({ user, entities = [], permissions, onFi
if (infos.length > 0) {
crossReferenceEmails();
}
}, [infos]);
}, [infos, entity]);
const makeUsers = async () => {
const newUsersSentence = newUsers.length > 0 ? `create ${newUsers.length} user(s)` : undefined;
@@ -459,7 +484,16 @@ export default function BatchCreateUser({ user, entities = [], permissions, onFi
<Select
defaultValue={{ value: (entities || [])[0]?.id, label: (entities || [])[0]?.label }}
options={entities.map((e) => ({ value: e.id, label: e.label }))}
onChange={(e) => setEntity(e?.value || undefined)}
onChange={(e) => {
if (!e) {
setEntity(undefined);
return;
}
setEntity({
id: e?.value,
label: e?.label
});
}}
isClearable={checkAccess(user, ["admin", "developer"])}
/>
</div>

View File

@@ -22,7 +22,6 @@ import ShortUniqueId from "short-unique-id";
import { ExamProps } from "@/exams/types";
import useExamStore from "@/stores/exam";
import useEvaluationPolling from "@/hooks/useEvaluationPolling";
import PracticeModal from "@/components/PracticeModal";
interface Props {
page: "exams" | "exercises";
@@ -37,6 +36,7 @@ export default function ExamPage({ page, user, destination = "/", hideSidebar =
const [avoidRepeated, setAvoidRepeated] = useState(false);
const [showAbandonPopup, setShowAbandonPopup] = useState(false);
const [pendingExercises, setPendingExercises] = useState<string[]>([]);
const [shouldPoll, setShouldPoll] = useState(false);
const {
exam, setExam,
@@ -149,8 +149,10 @@ export default function ExamPage({ page, user, destination = "/", hideSidebar =
useEffect(() => {
if (flags.finalizeExam && moduleIndex !== -1) {
setModuleIndex(-1);
}
}, [flags, moduleIndex, setModuleIndex]);
}, [flags.finalizeExam, moduleIndex, setModuleIndex]);
useEffect(() => {
if (flags.finalizeExam && !flags.pendingEvaluation && pendingExercises.length === 0) {

View File

@@ -6,7 +6,9 @@ import axios from "axios";
import formidable from "formidable-serverless";
import fs from "fs";
import FormData from 'form-data';
import client from "@/lib/mongodb";
const db = client.db(process.env.MONGODB_DB);
export default withIronSessionApiRoute(handler, sessionOptions);
@@ -66,6 +68,34 @@ async function handler(req: NextApiRequest, res: NextApiResponse) {
}
}
// Check if there is one eval for the current exercise
const previousEval = await db.collection("evaluation").findOne({
user: fields.userId,
session_id: fields.sessionId,
exercise_id: fields.exerciseId,
})
// If there is, delete it
if (previousEval) {
await db.collection("evaluation").deleteOne({
user: fields.userId,
session_id: fields.sessionId,
exercise_id: fields.exerciseId,
})
}
// Insert the new eval for the backend to place its result
await db.collection("evaluation").insertOne(
{
user: fields.userId,
session_id: fields.sessionId,
exercise_id: fields.exerciseId,
type: "speaking_interactive",
task: fields.task,
status: "pending"
}
);
await axios.post(
`${process.env.BACKEND_URL}/grade/speaking/${fields.task}`,
formData,
@@ -97,7 +127,7 @@ async function handler(req: NextApiRequest, res: NextApiResponse) {
});
}
});
}
}
export const config = {
api: {

View File

@@ -6,6 +6,9 @@ import axios from "axios";
import formidable from "formidable-serverless";
import fs from "fs";
import FormData from 'form-data';
import client from "@/lib/mongodb";
const db = client.db(process.env.MONGODB_DB);
export default withIronSessionApiRoute(handler, sessionOptions);
@@ -41,6 +44,34 @@ async function handler(req: NextApiRequest, res: NextApiResponse) {
formData.append('audio_1', buffer, 'audio_1.wav');
fs.rmSync(audioFile.path);
// Check if there is one eval for the current exercise
const previousEval = await db.collection("evaluation").findOne({
user: fields.userId,
session_id: fields.sessionId,
exercise_id: fields.exerciseId,
})
// If there is, delete it
if (previousEval) {
await db.collection("evaluation").deleteOne({
user: fields.userId,
session_id: fields.sessionId,
exercise_id: fields.exerciseId,
})
}
// Insert the new eval for the backend to place its result
await db.collection("evaluation").insertOne(
{
user: fields.userId,
session_id: fields.sessionId,
exercise_id: fields.exerciseId,
type: "speaking",
task: 2,
status: "pending"
}
);
await axios.post(
`${process.env.BACKEND_URL}/grade/speaking/2`,
formData,

View File

@@ -1,7 +1,7 @@
import type {NextApiRequest, NextApiResponse} from "next";
import type { NextApiRequest, NextApiResponse } from "next";
import client from "@/lib/mongodb";
import {withIronSessionApiRoute} from "iron-session/next";
import {sessionOptions} from "@/lib/session";
import { withIronSessionApiRoute } from "iron-session/next";
import { sessionOptions } from "@/lib/session";
const db = client.db(process.env.MONGODB_DB);
@@ -13,22 +13,17 @@ async function handler(req: NextApiRequest, res: NextApiResponse) {
async function get(req: NextApiRequest, res: NextApiResponse) {
if (!req.session.user) {
res.status(401).json({ok: false});
res.status(401).json({ ok: false });
return;
}
const {sessionId, userId, exerciseIds} = req.query;
const exercises = (exerciseIds! as string).split(',');
const finishedEvaluations = await db.collection("evaluation").find({
const { sessionId, userId } = req.query;
const singleEval = await db.collection("evaluation").findOne({
session_id: sessionId,
user: userId,
$or: [
{ status: "completed" },
{ status: "error" }
],
exercise_id: { $in: exercises }
}).toArray();
status: "pending",
});
const finishedExerciseIds = finishedEvaluations.map(evaluation => evaluation.exercise_id);
res.status(200).json({ finishedExerciseIds });
res.status(200).json({ hasPendingEvaluation: singleEval !== null});
}

View File

@@ -3,6 +3,9 @@ import type { NextApiRequest, NextApiResponse } from "next";
import { withIronSessionApiRoute } from "iron-session/next";
import { sessionOptions } from "@/lib/session";
import axios from "axios";
import client from "@/lib/mongodb";
const db = client.db(process.env.MONGODB_DB);
interface Body {
userId: string;
@@ -22,13 +25,41 @@ async function handler(req: NextApiRequest, res: NextApiResponse) {
return;
}
const { task, ...body} = req.body as Body;
const taskNumber = task.toString() !== "1" && task.toString() !== "2" ? "1" : task.toString();
const body = req.body as Body;
const taskNumber = body.task.toString() !== "1" && body.task.toString() !== "2" ? "1" : body.task.toString();
// Check if there is one eval for the current exercise
const previousEval = await db.collection("evaluation").findOne({
user: body.userId,
session_id: body.sessionId,
exercise_id: body.exerciseId,
})
// If there is delete it
if (previousEval) {
await db.collection("evaluation").deleteOne({
user: body.userId,
session_id: body.sessionId,
exercise_id: body.exerciseId,
})
}
// Insert the new eval for the backend to place it's result
await db.collection("evaluation").insertOne(
{
user: body.userId,
session_id: body.sessionId,
exercise_id: body.exerciseId,
type: "writing",
task: body.task,
status: "pending"
}
);
await axios.post(`${process.env.BACKEND_URL}/grade/writing/${taskNumber}`, body, {
headers: {
Authorization: `Bearer ${process.env.BACKEND_JWT}`,
},
});
res.status(200).json({ok: true});
res.status(200).json({ ok: true });
}

View File

@@ -0,0 +1,36 @@
import type { NextApiRequest, NextApiResponse } from "next";
import { withIronSessionApiRoute } from "iron-session/next";
import { sessionOptions } from "@/lib/session";
import axios from "axios";
export default withIronSessionApiRoute(handler, sessionOptions);
async function handler(req: NextApiRequest, res: NextApiResponse) {
if (req.method === "POST") return post(req, res);
return res.status(404).json({ ok: false });
}
async function post(req: NextApiRequest, res: NextApiResponse) {
if (!req.session.user) return res.status(401).json({ ok: false });
const response = await axios.post(
`${process.env.BACKEND_URL}/listening/instructions`,
req.body,
{
headers: {
Authorization: `Bearer ${process.env.BACKEND_JWT}`,
Accept: 'audio/mpeg'
},
responseType: 'arraybuffer',
}
);
res.writeHead(200, {
'Content-Type': 'audio/mpeg',
'Content-Length': response.data.length
});
res.end(response.data);
return;
}

View File

@@ -0,0 +1,42 @@
// Next.js API route support: https://nextjs.org/docs/api-routes/introduction
import type { NextApiRequest, NextApiResponse } from "next";
import client from "@/lib/mongodb";
import { withIronSessionApiRoute } from "iron-session/next";
import { sessionOptions } from "@/lib/session";
import { Stat } from "@/interfaces/user";
import { requestUser } from "@/utils/api";
import { UserSolution } from "@/interfaces/exam";
const db = client.db(process.env.MONGODB_DB);
export default withIronSessionApiRoute(handler, sessionOptions);
async function handler(req: NextApiRequest, res: NextApiResponse) {
if (req.method === "POST") return post(req, res);
}
interface Body {
solutions: UserSolution[];
sessionID: string;
}
async function post(req: NextApiRequest, res: NextApiResponse) {
const user = await requestUser(req, res)
if (!user) return res.status(401).json({ ok: false });
const { solutions, sessionID } = req.body as Body;
const disabledStats = await db.collection("stats").find({ user: user.id, session: sessionID, disabled: true }).toArray();
await Promise.all(disabledStats.map(async (stat) => {
const matchingSolution = solutions.find(s => s.exercise === stat.exercise);
if (matchingSolution) {
await db.collection("stats").updateOne(
{ id: stat.id },
{ $set: { ...matchingSolution } }
);
}
}));
return res.status(200).json({ ok: true });
}

View File

@@ -38,6 +38,9 @@ async function handler(req: NextApiRequest, res: NextApiResponse) {
await assignToEntity(req.body);
res.status(200).json({"ok": true});
break;
case 'getEntities':
res.status(200).json(await getEntities(req.body.emails))
break;
default:
res.status(400).json({ error: 'Invalid operation!' })
}
@@ -276,6 +279,36 @@ async function getIds(emails: string[]): Promise<Array<{ email: string; id: stri
}));
}
async function getEntities(emails: string[]): Promise<Array<{ email: string; entityLabels: string[] }>> {
const users = await db.collection('users')
.find({ email: { $in: emails } })
.project({ email: 1, entities: 1, _id: 0 })
.toArray();
const entityIds = [...new Set(
users.flatMap(user =>
(user.entities || []).map((entity: any) => entity.id)
)
)];
const entityRecords = await db.collection('entities')
.find({ id: { $in: entityIds } })
.project({ id: 1, label: 1, _id: 0 })
.toArray();
const entityMap = new Map(
entityRecords.map(entity => [entity.id, entity.label])
);
return users.map(user => ({
email: user.email,
entityLabels: (user.entities || [])
.map((entity: any) => entityMap.get(entity.id))
.filter((label: string): label is string => !!label)
}));
}
async function assignToEntity(body: any) {
const { ids, entity } = body;

View File

@@ -110,6 +110,10 @@ export default function Generation({ id, user, exam, examModule, permissions }:
}
});
if (state.listening.instructionsState.customInstructionsURL.startsWith('blob:')) {
URL.revokeObjectURL(state.listening.instructionsState.customInstructionsURL);
}
state.speaking.sections.forEach(section => {
const sectionState = section.state as Exercise;
if (sectionState.type === 'speaking') {

View File

@@ -166,6 +166,16 @@ const defaultModuleSettings = (module: Module, minTimer: number): ModuleState =>
importModule: true,
importing: false,
edit: [],
instructionsState: {
isInstructionsOpen: false,
chosenOption: { value: "Automatic", label: "Automatic" },
currentInstructions: "",
presetInstructions: "",
customInstructions: "",
currentInstructionsURL: "",
presetInstructionsURL: "",
customInstructionsURL: "",
},
};
if (["reading", "writing"].includes(module)) {
state["type"] = "general";

View File

@@ -108,6 +108,19 @@ export interface SectionState {
scriptLoading: boolean;
}
export interface ListeningInstructionsState {
isInstructionsOpen: boolean;
chosenOption: Option;
currentInstructions: string;
presetInstructions: string;
customInstructions: string;
currentInstructionsURL: string;
presetInstructionsURL: string;
customInstructionsURL: string;
}
export interface ModuleState {
examLabel: string;
sections: SectionState[];
@@ -122,6 +135,7 @@ export interface ModuleState {
edit: number[];
type?: "general" | "academic";
academic_url?: string | undefined;
instructionsState: ListeningInstructionsState;
}
export interface Avatar {