Merged in release/async (pull request #32)
Release/async Approved-by: Tiago Ribeiro
This commit is contained in:
@@ -5,4 +5,5 @@ README.md
|
|||||||
*.pyd
|
*.pyd
|
||||||
__pycache__
|
__pycache__
|
||||||
.pytest_cache
|
.pytest_cache
|
||||||
|
postman
|
||||||
/scripts
|
/scripts
|
||||||
|
|||||||
4
.gitignore
vendored
4
.gitignore
vendored
@@ -2,6 +2,6 @@ __pycache__
|
|||||||
.idea
|
.idea
|
||||||
.env
|
.env
|
||||||
.DS_Store
|
.DS_Store
|
||||||
/firebase-configs/test_firebase.json
|
.venv
|
||||||
/scripts
|
_scripts
|
||||||
*.env
|
*.env
|
||||||
17
.idea/ielts-be.iml
generated
17
.idea/ielts-be.iml
generated
@@ -1,17 +0,0 @@
|
|||||||
<?xml version="1.0" encoding="UTF-8"?>
|
|
||||||
<module type="PYTHON_MODULE" version="4">
|
|
||||||
<component name="NewModuleRootManager">
|
|
||||||
<content url="file://$MODULE_DIR$">
|
|
||||||
<excludeFolder url="file://$MODULE_DIR$/.venv" />
|
|
||||||
</content>
|
|
||||||
<orderEntry type="jdk" jdkName="Python 3.11 (ielts-be)" jdkType="Python SDK" />
|
|
||||||
<orderEntry type="sourceFolder" forTests="false" />
|
|
||||||
</component>
|
|
||||||
<component name="PackageRequirementsSettings">
|
|
||||||
<option name="versionSpecifier" value="Don't specify version" />
|
|
||||||
</component>
|
|
||||||
<component name="PyDocumentationSettings">
|
|
||||||
<option name="format" value="GOOGLE" />
|
|
||||||
<option name="myDocStringFormat" value="Google" />
|
|
||||||
</component>
|
|
||||||
</module>
|
|
||||||
6
.idea/inspectionProfiles/profiles_settings.xml
generated
6
.idea/inspectionProfiles/profiles_settings.xml
generated
@@ -1,6 +0,0 @@
|
|||||||
<component name="InspectionProjectProfileManager">
|
|
||||||
<settings>
|
|
||||||
<option name="USE_PROJECT_PROFILE" value="false" />
|
|
||||||
<version value="1.0" />
|
|
||||||
</settings>
|
|
||||||
</component>
|
|
||||||
10
.idea/misc.xml
generated
10
.idea/misc.xml
generated
@@ -1,10 +0,0 @@
|
|||||||
<?xml version="1.0" encoding="UTF-8"?>
|
|
||||||
<project version="4">
|
|
||||||
<component name="Black">
|
|
||||||
<option name="sdkName" value="Python 3.11 (ielts-be)" />
|
|
||||||
</component>
|
|
||||||
<component name="ProjectRootManager" version="2" project-jdk-name="Python 3.11 (ielts-be)" project-jdk-type="Python SDK" />
|
|
||||||
<component name="PyCharmProfessionalAdvertiser">
|
|
||||||
<option name="shown" value="true" />
|
|
||||||
</component>
|
|
||||||
</project>
|
|
||||||
8
.idea/modules.xml
generated
8
.idea/modules.xml
generated
@@ -1,8 +0,0 @@
|
|||||||
<?xml version="1.0" encoding="UTF-8"?>
|
|
||||||
<project version="4">
|
|
||||||
<component name="ProjectModuleManager">
|
|
||||||
<modules>
|
|
||||||
<module fileurl="file://$PROJECT_DIR$/.idea/ielts-be.iml" filepath="$PROJECT_DIR$/.idea/ielts-be.iml" />
|
|
||||||
</modules>
|
|
||||||
</component>
|
|
||||||
</project>
|
|
||||||
6
.idea/vcs.xml
generated
6
.idea/vcs.xml
generated
@@ -1,6 +0,0 @@
|
|||||||
<?xml version="1.0" encoding="UTF-8"?>
|
|
||||||
<project version="4">
|
|
||||||
<component name="VcsDirectoryMappings">
|
|
||||||
<mapping directory="" vcs="Git" />
|
|
||||||
</component>
|
|
||||||
</project>
|
|
||||||
25
Dockerfile
25
Dockerfile
@@ -1,6 +1,10 @@
|
|||||||
|
FROM python:3.11-slim as requirements-stage
|
||||||
|
WORKDIR /tmp
|
||||||
|
RUN pip install poetry
|
||||||
|
COPY pyproject.toml ./poetry.lock* /tmp/
|
||||||
|
RUN poetry export -f requirements.txt --output requirements.txt --without-hashes
|
||||||
|
|
||||||
|
|
||||||
# Use the official lightweight Python image.
|
|
||||||
# https://hub.docker.com/_/python
|
|
||||||
FROM python:3.11-slim
|
FROM python:3.11-slim
|
||||||
|
|
||||||
# Allow statements and log messages to immediately appear in the logs
|
# Allow statements and log messages to immediately appear in the logs
|
||||||
@@ -9,8 +13,11 @@ ENV PYTHONUNBUFFERED True
|
|||||||
# Copy local code to the container image.
|
# Copy local code to the container image.
|
||||||
ENV APP_HOME /app
|
ENV APP_HOME /app
|
||||||
WORKDIR $APP_HOME
|
WORKDIR $APP_HOME
|
||||||
|
|
||||||
COPY . ./
|
COPY . ./
|
||||||
|
|
||||||
|
COPY --from=requirements-stage /tmp/requirements.txt /app/requirements.txt
|
||||||
|
|
||||||
RUN apt update && apt install -y \
|
RUN apt update && apt install -y \
|
||||||
ffmpeg \
|
ffmpeg \
|
||||||
poppler-utils \
|
poppler-utils \
|
||||||
@@ -20,23 +27,15 @@ RUN apt update && apt install -y \
|
|||||||
texlive-xetex \
|
texlive-xetex \
|
||||||
pandoc \
|
pandoc \
|
||||||
librsvg2-bin \
|
librsvg2-bin \
|
||||||
curl \
|
|
||||||
&& rm -rf /var/lib/apt/lists/*
|
&& rm -rf /var/lib/apt/lists/*
|
||||||
|
|
||||||
|
RUN pip install --no-cache-dir -r /app/requirements.txt
|
||||||
|
|
||||||
RUN curl -sL https://deb.nodesource.com/setup_20.x | bash - \
|
EXPOSE 8000
|
||||||
&& apt-get install -y nodejs
|
|
||||||
|
|
||||||
RUN npm install -g firebase-tools
|
|
||||||
|
|
||||||
# Install production dependencies.
|
|
||||||
RUN pip install --no-cache-dir -r requirements.txt
|
|
||||||
|
|
||||||
EXPOSE 5000
|
|
||||||
|
|
||||||
# Run the web service on container startup. Here we use the gunicorn
|
# Run the web service on container startup. Here we use the gunicorn
|
||||||
# webserver, with one worker process and 8 threads.
|
# webserver, with one worker process and 8 threads.
|
||||||
# For environments with multiple CPU cores, increase the number of workers
|
# For environments with multiple CPU cores, increase the number of workers
|
||||||
# to be equal to the cores available.
|
# to be equal to the cores available.
|
||||||
# Timeout is set to 0 to disable the timeouts of the workers to allow Cloud Run to handle instance scaling.
|
# Timeout is set to 0 to disable the timeouts of the workers to allow Cloud Run to handle instance scaling.
|
||||||
CMD exec gunicorn --bind 0.0.0.0:5000 --workers 1 --threads 8 --timeout 0 app:app
|
CMD exec uvicorn --bind 0.0.0.0:8000 --workers 1 --threads 8 --timeout 0 app.server:app
|
||||||
|
|||||||
26
README.md
Normal file
26
README.md
Normal file
@@ -0,0 +1,26 @@
|
|||||||
|
# Run the app
|
||||||
|
|
||||||
|
1. pip install poetry
|
||||||
|
2. poetry install
|
||||||
|
3. python app.py
|
||||||
|
|
||||||
|
# Modules
|
||||||
|
|
||||||
|
- api -> endpoints
|
||||||
|
- configs -> app configs and constants
|
||||||
|
- controllers -> meant for handling exceptions, transforming data or orchestrate complex use cases with several services, for now mostly just calls services directly
|
||||||
|
- dtos -> pydantic models used for receiving data and for validation
|
||||||
|
- exceptions -> if custom exceptions are needed to throw in services so they can be handled in the controllers to construct some specific http response
|
||||||
|
- helpers -> a bunch of lightweight functions grouped by some kind of logic
|
||||||
|
- mappers -> to map complex data
|
||||||
|
- middlewares -> classes that are run before executing the endpoint code
|
||||||
|
- repositories -> interfaces with data stores
|
||||||
|
- services -> all the business logic goes here
|
||||||
|
- utils -> loose functions used on one-off occasions
|
||||||
|
|
||||||
|
# Dependency injection
|
||||||
|
|
||||||
|
If you want to add new controllers/services/repositories you will have to change
|
||||||
|
app/configs/dependency_injection.py
|
||||||
|
|
||||||
|
Also make sure you have @inject on your endpoint when calling these.
|
||||||
0
app/__init__.py
Normal file
0
app/__init__.py
Normal file
28
app/api/__init__.py
Normal file
28
app/api/__init__.py
Normal file
@@ -0,0 +1,28 @@
|
|||||||
|
from fastapi import APIRouter
|
||||||
|
|
||||||
|
from .listening import listening_router
|
||||||
|
from .reading import reading_router
|
||||||
|
from .speaking import speaking_router
|
||||||
|
from .training import training_router
|
||||||
|
from .writing import writing_router
|
||||||
|
from .grade import grade_router
|
||||||
|
from .user import user_router
|
||||||
|
from .level import level_router
|
||||||
|
|
||||||
|
router = APIRouter(prefix="/api", tags=["Home"])
|
||||||
|
|
||||||
|
@router.get('/healthcheck')
|
||||||
|
async def healthcheck():
|
||||||
|
return {"healthy": True}
|
||||||
|
|
||||||
|
exercises_router = APIRouter()
|
||||||
|
exercises_router.include_router(listening_router, prefix="/listening", tags=["Listening"])
|
||||||
|
exercises_router.include_router(reading_router, prefix="/reading", tags=["Reading"])
|
||||||
|
exercises_router.include_router(speaking_router, prefix="/speaking", tags=["Speaking"])
|
||||||
|
exercises_router.include_router(writing_router, prefix="/writing", tags=["Writing"])
|
||||||
|
exercises_router.include_router(level_router, prefix="/level", tags=["Level"])
|
||||||
|
|
||||||
|
router.include_router(grade_router, prefix="/grade", tags=["Grade"])
|
||||||
|
router.include_router(training_router, prefix="/training", tags=["Training"])
|
||||||
|
router.include_router(user_router, prefix="/user", tags=["Users"])
|
||||||
|
router.include_router(exercises_router)
|
||||||
74
app/api/grade.py
Normal file
74
app/api/grade.py
Normal file
@@ -0,0 +1,74 @@
|
|||||||
|
from dependency_injector.wiring import inject, Provide
|
||||||
|
from fastapi import APIRouter, Depends, Path, Request
|
||||||
|
|
||||||
|
from app.controllers.abc import IGradeController
|
||||||
|
from app.dtos.writing import WritingGradeTaskDTO
|
||||||
|
from app.dtos.speaking import GradeSpeakingAnswersDTO, GradeSpeakingDTO
|
||||||
|
from app.middlewares import Authorized, IsAuthenticatedViaBearerToken
|
||||||
|
|
||||||
|
controller = "grade_controller"
|
||||||
|
grade_router = APIRouter()
|
||||||
|
|
||||||
|
|
||||||
|
@grade_router.post(
|
||||||
|
'/writing/{task}',
|
||||||
|
dependencies=[Depends(Authorized([IsAuthenticatedViaBearerToken]))]
|
||||||
|
)
|
||||||
|
@inject
|
||||||
|
async def grade_writing_task(
|
||||||
|
data: WritingGradeTaskDTO,
|
||||||
|
task: int = Path(..., ge=1, le=2),
|
||||||
|
grade_controller: IGradeController = Depends(Provide[controller])
|
||||||
|
):
|
||||||
|
return await grade_controller.grade_writing_task(task, data)
|
||||||
|
|
||||||
|
|
||||||
|
@grade_router.post(
|
||||||
|
'/speaking/2',
|
||||||
|
dependencies=[Depends(Authorized([IsAuthenticatedViaBearerToken]))]
|
||||||
|
)
|
||||||
|
@inject
|
||||||
|
async def grade_speaking_task_2(
|
||||||
|
data: GradeSpeakingDTO,
|
||||||
|
grade_controller: IGradeController = Depends(Provide[controller])
|
||||||
|
):
|
||||||
|
return await grade_controller.grade_speaking_task(2, [data.dict()])
|
||||||
|
|
||||||
|
|
||||||
|
@grade_router.post(
|
||||||
|
'/speaking/{task}',
|
||||||
|
dependencies=[Depends(Authorized([IsAuthenticatedViaBearerToken]))]
|
||||||
|
)
|
||||||
|
@inject
|
||||||
|
async def grade_speaking_task_1_and_3(
|
||||||
|
data: GradeSpeakingAnswersDTO,
|
||||||
|
task: int = Path(..., ge=1, le=3),
|
||||||
|
grade_controller: IGradeController = Depends(Provide[controller])
|
||||||
|
):
|
||||||
|
return await grade_controller.grade_speaking_task(task, data.answers)
|
||||||
|
|
||||||
|
|
||||||
|
@grade_router.post(
|
||||||
|
'/summary',
|
||||||
|
dependencies=[Depends(Authorized([IsAuthenticatedViaBearerToken]))]
|
||||||
|
)
|
||||||
|
@inject
|
||||||
|
async def grading_summary(
|
||||||
|
request: Request,
|
||||||
|
grade_controller: IGradeController = Depends(Provide[controller])
|
||||||
|
):
|
||||||
|
data = await request.json()
|
||||||
|
return await grade_controller.grading_summary(data)
|
||||||
|
|
||||||
|
|
||||||
|
@grade_router.post(
|
||||||
|
'/short_answers',
|
||||||
|
dependencies=[Depends(Authorized([IsAuthenticatedViaBearerToken]))]
|
||||||
|
)
|
||||||
|
@inject
|
||||||
|
async def grade_short_answers(
|
||||||
|
request: Request,
|
||||||
|
grade_controller: IGradeController = Depends(Provide[controller])
|
||||||
|
):
|
||||||
|
data = await request.json()
|
||||||
|
return await grade_controller.grade_short_answers(data)
|
||||||
9
app/api/home.py
Normal file
9
app/api/home.py
Normal file
@@ -0,0 +1,9 @@
|
|||||||
|
from fastapi import APIRouter
|
||||||
|
home_router = APIRouter()
|
||||||
|
|
||||||
|
|
||||||
|
@home_router.get(
|
||||||
|
'/healthcheck'
|
||||||
|
)
|
||||||
|
async def healthcheck():
|
||||||
|
return {"healthy": True}
|
||||||
67
app/api/level.py
Normal file
67
app/api/level.py
Normal file
@@ -0,0 +1,67 @@
|
|||||||
|
from dependency_injector.wiring import Provide, inject
|
||||||
|
from fastapi import APIRouter, Depends, UploadFile, Request
|
||||||
|
|
||||||
|
from app.dtos.level import LevelExercisesDTO
|
||||||
|
from app.middlewares import Authorized, IsAuthenticatedViaBearerToken
|
||||||
|
from app.controllers.abc import ILevelController
|
||||||
|
|
||||||
|
controller = "level_controller"
|
||||||
|
level_router = APIRouter()
|
||||||
|
|
||||||
|
|
||||||
|
@level_router.post(
|
||||||
|
'/',
|
||||||
|
dependencies=[Depends(Authorized([IsAuthenticatedViaBearerToken]))]
|
||||||
|
)
|
||||||
|
@inject
|
||||||
|
async def generate_exercises(
|
||||||
|
dto: LevelExercisesDTO,
|
||||||
|
level_controller: ILevelController = Depends(Provide[controller])
|
||||||
|
):
|
||||||
|
return await level_controller.generate_exercises(dto)
|
||||||
|
|
||||||
|
@level_router.get(
|
||||||
|
'/',
|
||||||
|
dependencies=[Depends(Authorized([IsAuthenticatedViaBearerToken]))]
|
||||||
|
)
|
||||||
|
@inject
|
||||||
|
async def get_level_exam(
|
||||||
|
level_controller: ILevelController = Depends(Provide[controller])
|
||||||
|
):
|
||||||
|
return await level_controller.get_level_exam()
|
||||||
|
|
||||||
|
|
||||||
|
@level_router.get(
|
||||||
|
'/utas',
|
||||||
|
dependencies=[Depends(Authorized([IsAuthenticatedViaBearerToken]))]
|
||||||
|
)
|
||||||
|
@inject
|
||||||
|
async def get_level_utas(
|
||||||
|
level_controller: ILevelController = Depends(Provide[controller])
|
||||||
|
):
|
||||||
|
return await level_controller.get_level_utas()
|
||||||
|
|
||||||
|
|
||||||
|
@level_router.post(
|
||||||
|
'/import/',
|
||||||
|
dependencies=[Depends(Authorized([IsAuthenticatedViaBearerToken]))]
|
||||||
|
)
|
||||||
|
@inject
|
||||||
|
async def upload(
|
||||||
|
file: UploadFile,
|
||||||
|
level_controller: ILevelController = Depends(Provide[controller])
|
||||||
|
):
|
||||||
|
return await level_controller.upload_level(file)
|
||||||
|
|
||||||
|
|
||||||
|
@level_router.post(
|
||||||
|
'/custom/',
|
||||||
|
dependencies=[Depends(Authorized([IsAuthenticatedViaBearerToken]))]
|
||||||
|
)
|
||||||
|
@inject
|
||||||
|
async def custom_level(
|
||||||
|
request: Request,
|
||||||
|
level_controller: ILevelController = Depends(Provide[controller])
|
||||||
|
):
|
||||||
|
data = await request.json()
|
||||||
|
return await level_controller.get_custom_level(data)
|
||||||
63
app/api/listening.py
Normal file
63
app/api/listening.py
Normal file
@@ -0,0 +1,63 @@
|
|||||||
|
import random
|
||||||
|
|
||||||
|
from dependency_injector.wiring import Provide, inject
|
||||||
|
from fastapi import APIRouter, Depends, Path, Query
|
||||||
|
|
||||||
|
from app.middlewares import Authorized, IsAuthenticatedViaBearerToken
|
||||||
|
from app.controllers.abc import IListeningController
|
||||||
|
from app.configs.constants import EducationalContent, ListeningExerciseType
|
||||||
|
from app.dtos.listening import SaveListeningDTO, GenerateListeningExercises, Dialog
|
||||||
|
|
||||||
|
controller = "listening_controller"
|
||||||
|
listening_router = APIRouter()
|
||||||
|
|
||||||
|
@listening_router.get(
|
||||||
|
'/{section}',
|
||||||
|
dependencies=[Depends(Authorized([IsAuthenticatedViaBearerToken]))]
|
||||||
|
)
|
||||||
|
@inject
|
||||||
|
async def generate_listening_dialog(
|
||||||
|
section: int = Path(..., ge=1, le=4),
|
||||||
|
difficulty: str = Query(default=None),
|
||||||
|
topic: str = Query(default=None),
|
||||||
|
listening_controller: IListeningController = Depends(Provide[controller])
|
||||||
|
):
|
||||||
|
difficulty = random.choice(EducationalContent.DIFFICULTIES) if not difficulty else difficulty
|
||||||
|
topic = random.choice(EducationalContent.TOPICS) if not topic else topic
|
||||||
|
return await listening_controller.generate_listening_dialog(section, difficulty, topic)
|
||||||
|
|
||||||
|
@listening_router.post(
|
||||||
|
'/media',
|
||||||
|
dependencies=[Depends(Authorized([IsAuthenticatedViaBearerToken]))]
|
||||||
|
)
|
||||||
|
@inject
|
||||||
|
async def generate_mp3(
|
||||||
|
dto: Dialog,
|
||||||
|
listening_controller: IListeningController = Depends(Provide[controller])
|
||||||
|
):
|
||||||
|
return await listening_controller.generate_mp3(dto)
|
||||||
|
|
||||||
|
|
||||||
|
@listening_router.post(
|
||||||
|
'/{section}',
|
||||||
|
dependencies=[Depends(Authorized([IsAuthenticatedViaBearerToken]))]
|
||||||
|
)
|
||||||
|
@inject
|
||||||
|
async def generate_listening_exercise(
|
||||||
|
dto: GenerateListeningExercises,
|
||||||
|
section: int = Path(..., ge=1, le=4),
|
||||||
|
listening_controller: IListeningController = Depends(Provide[controller])
|
||||||
|
):
|
||||||
|
return await listening_controller.get_listening_question(section, dto)
|
||||||
|
|
||||||
|
|
||||||
|
@listening_router.post(
|
||||||
|
'/',
|
||||||
|
dependencies=[Depends(Authorized([IsAuthenticatedViaBearerToken]))]
|
||||||
|
)
|
||||||
|
@inject
|
||||||
|
async def save_listening(
|
||||||
|
data: SaveListeningDTO,
|
||||||
|
listening_controller: IListeningController = Depends(Provide[controller])
|
||||||
|
):
|
||||||
|
return await listening_controller.save_listening(data)
|
||||||
51
app/api/reading.py
Normal file
51
app/api/reading.py
Normal file
@@ -0,0 +1,51 @@
|
|||||||
|
from typing import Optional
|
||||||
|
|
||||||
|
from dependency_injector.wiring import Provide, inject
|
||||||
|
from fastapi import APIRouter, Depends, Path, Query, UploadFile
|
||||||
|
|
||||||
|
from app.dtos.reading import ReadingDTO
|
||||||
|
from app.middlewares import Authorized, IsAuthenticatedViaBearerToken
|
||||||
|
from app.controllers.abc import IReadingController
|
||||||
|
|
||||||
|
controller = "reading_controller"
|
||||||
|
reading_router = APIRouter()
|
||||||
|
|
||||||
|
|
||||||
|
@reading_router.post(
|
||||||
|
'/import',
|
||||||
|
dependencies=[Depends(Authorized([IsAuthenticatedViaBearerToken]))]
|
||||||
|
)
|
||||||
|
@inject
|
||||||
|
async def upload(
|
||||||
|
exercises: UploadFile,
|
||||||
|
solutions: UploadFile = None,
|
||||||
|
reading_controller: IReadingController = Depends(Provide[controller])
|
||||||
|
):
|
||||||
|
print(exercises.filename)
|
||||||
|
#print(solutions.filename)
|
||||||
|
return await reading_controller.import_exam(exercises, solutions)
|
||||||
|
|
||||||
|
@reading_router.get(
|
||||||
|
'/{passage}',
|
||||||
|
dependencies=[Depends(Authorized([IsAuthenticatedViaBearerToken]))]
|
||||||
|
)
|
||||||
|
@inject
|
||||||
|
async def generate_passage(
|
||||||
|
topic: Optional[str] = Query(None),
|
||||||
|
word_count: Optional[int] = Query(None),
|
||||||
|
passage: int = Path(..., ge=1, le=3),
|
||||||
|
reading_controller: IReadingController = Depends(Provide[controller])
|
||||||
|
):
|
||||||
|
return await reading_controller.generate_reading_passage(passage, topic, word_count)
|
||||||
|
|
||||||
|
@reading_router.post(
|
||||||
|
'/{passage}',
|
||||||
|
dependencies=[Depends(Authorized([IsAuthenticatedViaBearerToken]))]
|
||||||
|
)
|
||||||
|
@inject
|
||||||
|
async def generate_reading(
|
||||||
|
dto: ReadingDTO,
|
||||||
|
passage: int = Path(..., ge=1, le=3),
|
||||||
|
reading_controller: IReadingController = Depends(Provide[controller])
|
||||||
|
):
|
||||||
|
return await reading_controller.generate_reading_exercises(passage, dto)
|
||||||
98
app/api/speaking.py
Normal file
98
app/api/speaking.py
Normal file
@@ -0,0 +1,98 @@
|
|||||||
|
import random
|
||||||
|
from typing import Optional
|
||||||
|
|
||||||
|
from dependency_injector.wiring import inject, Provide
|
||||||
|
from fastapi import APIRouter, Path, Query, Depends, BackgroundTasks
|
||||||
|
|
||||||
|
from app.middlewares import Authorized, IsAuthenticatedViaBearerToken
|
||||||
|
from app.configs.constants import EducationalContent
|
||||||
|
from app.controllers.abc import ISpeakingController
|
||||||
|
from app.dtos.speaking import (
|
||||||
|
SaveSpeakingDTO, GenerateVideo1DTO, GenerateVideo2DTO, GenerateVideo3DTO
|
||||||
|
)
|
||||||
|
|
||||||
|
controller = "speaking_controller"
|
||||||
|
speaking_router = APIRouter()
|
||||||
|
|
||||||
|
|
||||||
|
@speaking_router.get(
|
||||||
|
'/1',
|
||||||
|
dependencies=[Depends(Authorized([IsAuthenticatedViaBearerToken]))]
|
||||||
|
)
|
||||||
|
@inject
|
||||||
|
async def get_speaking_task(
|
||||||
|
first_topic: str = Query(default=random.choice(EducationalContent.MTI_TOPICS)),
|
||||||
|
second_topic: str = Query(default=random.choice(EducationalContent.MTI_TOPICS)),
|
||||||
|
difficulty: str = Query(default=random.choice(EducationalContent.DIFFICULTIES)),
|
||||||
|
speaking_controller: ISpeakingController = Depends(Provide[controller])
|
||||||
|
):
|
||||||
|
return await speaking_controller.get_speaking_part(1, first_topic, difficulty, second_topic)
|
||||||
|
|
||||||
|
|
||||||
|
@speaking_router.get(
|
||||||
|
'/{task}',
|
||||||
|
dependencies=[Depends(Authorized([IsAuthenticatedViaBearerToken]))]
|
||||||
|
)
|
||||||
|
@inject
|
||||||
|
async def get_speaking_task(
|
||||||
|
task: int = Path(..., ge=2, le=3),
|
||||||
|
topic: str = Query(default=random.choice(EducationalContent.MTI_TOPICS)),
|
||||||
|
difficulty: str = Query(default=random.choice(EducationalContent.DIFFICULTIES)),
|
||||||
|
speaking_controller: ISpeakingController = Depends(Provide[controller])
|
||||||
|
):
|
||||||
|
return await speaking_controller.get_speaking_part(task, topic, difficulty)
|
||||||
|
|
||||||
|
|
||||||
|
@speaking_router.post(
|
||||||
|
'/',
|
||||||
|
dependencies=[Depends(Authorized([IsAuthenticatedViaBearerToken]))]
|
||||||
|
)
|
||||||
|
@inject
|
||||||
|
async def save_speaking(
|
||||||
|
data: SaveSpeakingDTO,
|
||||||
|
background_tasks: BackgroundTasks,
|
||||||
|
speaking_controller: ISpeakingController = Depends(Provide[controller])
|
||||||
|
):
|
||||||
|
return await speaking_controller.save_speaking(data, background_tasks)
|
||||||
|
|
||||||
|
|
||||||
|
@speaking_router.post(
|
||||||
|
'/generate_video/1',
|
||||||
|
dependencies=[Depends(Authorized([IsAuthenticatedViaBearerToken]))]
|
||||||
|
)
|
||||||
|
@inject
|
||||||
|
async def generate_video_1(
|
||||||
|
data: GenerateVideo1DTO,
|
||||||
|
speaking_controller: ISpeakingController = Depends(Provide[controller])
|
||||||
|
):
|
||||||
|
return await speaking_controller.generate_video(
|
||||||
|
1, data.avatar, data.first_topic, data.questions, second_topic=data.second_topic
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
@speaking_router.post(
|
||||||
|
'/generate_video/2',
|
||||||
|
dependencies=[Depends(Authorized([IsAuthenticatedViaBearerToken]))]
|
||||||
|
)
|
||||||
|
@inject
|
||||||
|
async def generate_video_2(
|
||||||
|
data: GenerateVideo2DTO,
|
||||||
|
speaking_controller: ISpeakingController = Depends(Provide[controller])
|
||||||
|
):
|
||||||
|
return await speaking_controller.generate_video(
|
||||||
|
2, data.avatar, data.topic, [data.question], prompts=data.prompts, suffix=data.suffix
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
@speaking_router.post(
|
||||||
|
'/generate_video/3',
|
||||||
|
dependencies=[Depends(Authorized([IsAuthenticatedViaBearerToken]))]
|
||||||
|
)
|
||||||
|
@inject
|
||||||
|
async def generate_video_3(
|
||||||
|
data: GenerateVideo3DTO,
|
||||||
|
speaking_controller: ISpeakingController = Depends(Provide[controller])
|
||||||
|
):
|
||||||
|
return await speaking_controller.generate_video(
|
||||||
|
3, data.avatar, data.topic, data.questions
|
||||||
|
)
|
||||||
34
app/api/training.py
Normal file
34
app/api/training.py
Normal file
@@ -0,0 +1,34 @@
|
|||||||
|
from dependency_injector.wiring import Provide, inject
|
||||||
|
from fastapi import APIRouter, Depends, Request
|
||||||
|
|
||||||
|
from app.dtos.training import FetchTipsDTO
|
||||||
|
from app.middlewares import Authorized, IsAuthenticatedViaBearerToken
|
||||||
|
from app.controllers.abc import ITrainingController
|
||||||
|
|
||||||
|
controller = "training_controller"
|
||||||
|
training_router = APIRouter()
|
||||||
|
|
||||||
|
|
||||||
|
@training_router.post(
|
||||||
|
'/tips',
|
||||||
|
dependencies=[Depends(Authorized([IsAuthenticatedViaBearerToken]))]
|
||||||
|
)
|
||||||
|
@inject
|
||||||
|
async def get_reading_passage(
|
||||||
|
data: FetchTipsDTO,
|
||||||
|
training_controller: ITrainingController = Depends(Provide[controller])
|
||||||
|
):
|
||||||
|
return await training_controller.fetch_tips(data)
|
||||||
|
|
||||||
|
|
||||||
|
@training_router.post(
|
||||||
|
'/',
|
||||||
|
dependencies=[Depends(Authorized([IsAuthenticatedViaBearerToken]))]
|
||||||
|
)
|
||||||
|
@inject
|
||||||
|
async def training_content(
|
||||||
|
request: Request,
|
||||||
|
training_controller: ITrainingController = Depends(Provide[controller])
|
||||||
|
):
|
||||||
|
data = await request.json()
|
||||||
|
return await training_controller.get_training_content(data)
|
||||||
21
app/api/user.py
Normal file
21
app/api/user.py
Normal file
@@ -0,0 +1,21 @@
|
|||||||
|
from dependency_injector.wiring import Provide, inject
|
||||||
|
from fastapi import APIRouter, Depends
|
||||||
|
|
||||||
|
from app.dtos.user_batch import BatchUsersDTO
|
||||||
|
from app.middlewares import Authorized, IsAuthenticatedViaBearerToken
|
||||||
|
from app.controllers.abc import IUserController
|
||||||
|
|
||||||
|
controller = "user_controller"
|
||||||
|
user_router = APIRouter()
|
||||||
|
|
||||||
|
|
||||||
|
@user_router.post(
|
||||||
|
'/import',
|
||||||
|
dependencies=[Depends(Authorized([IsAuthenticatedViaBearerToken]))]
|
||||||
|
)
|
||||||
|
@inject
|
||||||
|
async def batch_import(
|
||||||
|
batch: BatchUsersDTO,
|
||||||
|
user_controller: IUserController = Depends(Provide[controller])
|
||||||
|
):
|
||||||
|
return await user_controller.batch_import(batch)
|
||||||
27
app/api/writing.py
Normal file
27
app/api/writing.py
Normal file
@@ -0,0 +1,27 @@
|
|||||||
|
import random
|
||||||
|
|
||||||
|
from dependency_injector.wiring import inject, Provide
|
||||||
|
from fastapi import APIRouter, Path, Query, Depends
|
||||||
|
|
||||||
|
from app.middlewares import Authorized, IsAuthenticatedViaBearerToken
|
||||||
|
from app.configs.constants import EducationalContent
|
||||||
|
from app.controllers.abc import IWritingController
|
||||||
|
|
||||||
|
controller = "writing_controller"
|
||||||
|
writing_router = APIRouter()
|
||||||
|
|
||||||
|
|
||||||
|
@writing_router.get(
|
||||||
|
'/{task}',
|
||||||
|
dependencies=[Depends(Authorized([IsAuthenticatedViaBearerToken]))]
|
||||||
|
)
|
||||||
|
@inject
|
||||||
|
async def generate_writing(
|
||||||
|
task: int = Path(..., ge=1, le=2),
|
||||||
|
difficulty: str = Query(default=None),
|
||||||
|
topic: str = Query(default=None),
|
||||||
|
writing_controller: IWritingController = Depends(Provide[controller])
|
||||||
|
):
|
||||||
|
difficulty = random.choice(EducationalContent.DIFFICULTIES) if not difficulty else difficulty
|
||||||
|
topic = random.choice(EducationalContent.MTI_TOPICS) if not topic else topic
|
||||||
|
return await writing_controller.get_writing_task_general_question(task, topic, difficulty)
|
||||||
5
app/configs/__init__.py
Normal file
5
app/configs/__init__.py
Normal file
@@ -0,0 +1,5 @@
|
|||||||
|
from .dependency_injection import DependencyInjector
|
||||||
|
|
||||||
|
__all__ = [
|
||||||
|
"DependencyInjector"
|
||||||
|
]
|
||||||
857
app/configs/constants.py
Normal file
857
app/configs/constants.py
Normal file
@@ -0,0 +1,857 @@
|
|||||||
|
from enum import Enum
|
||||||
|
|
||||||
|
########################################################################################################################
|
||||||
|
# DISCLAIMER #
|
||||||
|
# #
|
||||||
|
# All the array and dict "constants" are mutable variables, if somewhere in the app you modify them in any way, shape #
|
||||||
|
# or form all the other methods that will use these "constants" will also use the modified version. If you're unsure #
|
||||||
|
# whether a method will modify it use copy's deepcopy: #
|
||||||
|
# #
|
||||||
|
# from copy import deepcopy #
|
||||||
|
# #
|
||||||
|
# new_ref = deepcopy(CONSTANT) #
|
||||||
|
# #
|
||||||
|
# Using a wrapper method that returns a "constant" won't handle nested mutables. #
|
||||||
|
########################################################################################################################
|
||||||
|
|
||||||
|
BLACKLISTED_WORDS = ["jesus", "sex", "gay", "lesbian", "homosexual", "god", "angel", "pornography", "beer", "wine",
|
||||||
|
"cocaine", "alcohol", "nudity", "lgbt", "casino", "gambling", "catholicism",
|
||||||
|
"discrimination", "politic", "christianity", "islam", "christian", "christians",
|
||||||
|
"jews", "jew", "discrimination", "discriminatory"]
|
||||||
|
|
||||||
|
|
||||||
|
class UserDefaults:
|
||||||
|
DESIRED_LEVELS = {
|
||||||
|
"reading": 9,
|
||||||
|
"listening": 9,
|
||||||
|
"writing": 9,
|
||||||
|
"speaking": 9,
|
||||||
|
}
|
||||||
|
|
||||||
|
LEVELS = {
|
||||||
|
"reading": 0,
|
||||||
|
"listening": 0,
|
||||||
|
"writing": 0,
|
||||||
|
"speaking": 0,
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
class ExamVariant(Enum):
|
||||||
|
FULL = "full"
|
||||||
|
PARTIAL = "partial"
|
||||||
|
|
||||||
|
|
||||||
|
class ReadingExerciseType(str, Enum):
|
||||||
|
fillBlanks = "fillBlanks"
|
||||||
|
writeBlanks = "writeBlanks"
|
||||||
|
trueFalse = "trueFalse"
|
||||||
|
paragraphMatch = "paragraphMatch"
|
||||||
|
ideaMatch = "ideaMatch"
|
||||||
|
|
||||||
|
|
||||||
|
class ListeningExerciseType(str, Enum):
|
||||||
|
multipleChoice = "multipleChoice"
|
||||||
|
multipleChoice3Options = "multipleChoice3Options"
|
||||||
|
writeBlanksQuestions = "writeBlanksQuestions"
|
||||||
|
writeBlanksFill = "writeBlanksFill"
|
||||||
|
writeBlanksForm = "writeBlanksForm"
|
||||||
|
|
||||||
|
class LevelExerciseType(str, Enum):
|
||||||
|
multipleChoice = "multipleChoice"
|
||||||
|
mcBlank = "mcBlank"
|
||||||
|
mcUnderline = "mcUnderline"
|
||||||
|
blankSpace = "blankSpaceText"
|
||||||
|
passageUtas = "passageUtas"
|
||||||
|
fillBlanksMC = "fillBlanksMC"
|
||||||
|
|
||||||
|
|
||||||
|
class CustomLevelExerciseTypes(Enum):
|
||||||
|
MULTIPLE_CHOICE_4 = "multiple_choice_4"
|
||||||
|
MULTIPLE_CHOICE_BLANK_SPACE = "multiple_choice_blank_space"
|
||||||
|
MULTIPLE_CHOICE_UNDERLINED = "multiple_choice_underlined"
|
||||||
|
BLANK_SPACE_TEXT = "blank_space_text"
|
||||||
|
READING_PASSAGE_UTAS = "reading_passage_utas"
|
||||||
|
WRITING_LETTER = "writing_letter"
|
||||||
|
WRITING_2 = "writing_2"
|
||||||
|
SPEAKING_1 = "speaking_1"
|
||||||
|
SPEAKING_2 = "speaking_2"
|
||||||
|
SPEAKING_3 = "speaking_3"
|
||||||
|
READING_1 = "reading_1"
|
||||||
|
READING_2 = "reading_2"
|
||||||
|
READING_3 = "reading_3"
|
||||||
|
LISTENING_1 = "listening_1"
|
||||||
|
LISTENING_2 = "listening_2"
|
||||||
|
LISTENING_3 = "listening_3"
|
||||||
|
LISTENING_4 = "listening_4"
|
||||||
|
|
||||||
|
|
||||||
|
class QuestionType(Enum):
|
||||||
|
LISTENING_SECTION_1 = "Listening Section 1"
|
||||||
|
LISTENING_SECTION_2 = "Listening Section 2"
|
||||||
|
LISTENING_SECTION_3 = "Listening Section 3"
|
||||||
|
LISTENING_SECTION_4 = "Listening Section 4"
|
||||||
|
WRITING_TASK_1 = "Writing Task 1"
|
||||||
|
WRITING_TASK_2 = "Writing Task 2"
|
||||||
|
SPEAKING_1 = "Speaking Task Part 1"
|
||||||
|
SPEAKING_2 = "Speaking Task Part 2"
|
||||||
|
READING_PASSAGE_1 = "Reading Passage 1"
|
||||||
|
READING_PASSAGE_2 = "Reading Passage 2"
|
||||||
|
READING_PASSAGE_3 = "Reading Passage 3"
|
||||||
|
|
||||||
|
|
||||||
|
class HeygenAvatars(Enum):
    """HeyGen avatar identifiers.

    Values are opaque avatar IDs issued by the HeyGen API; member
    names pair the avatar persona with its voice name.
    """

    MATTHEW_NOAH = "5912afa7c77c47d3883af3d874047aaf"
    VERA_CERISE = "9e58d96a383e4568a7f1e49df549e0e4"
    EDWARD_TONY = "d2cdd9c0379a4d06ae2afb6e5039bd0c"
    TANYA_MOLLY = "045cb5dcd00042b3a1e4f3bc1c12176b"
    KAYLA_ABBI = "1ae1e5396cc444bfad332155fdb7a934"
    JEROME_RYAN = "0ee6aa7cc1084063a630ae514fccaa31"
    TYLER_CHRISTOPHER = "5772cff935844516ad7eeff21f839e43"
|
from enum import Enum
|
||||||
|
|
||||||
|
class ELAIAvatars(Enum):
    """Elai.io avatar presets.

    Each member's value is a dict with the fields the Elai video API
    expects: ``avatar_code``, ``avatar_gender``, ``avatar_url`` (S3
    preview image), ``avatar_canvas`` (render canvas image), and the
    TTS ``voice_id`` / ``voice_provider`` pair.
    """

    # Works
    # NOTE(review): GIA's avatar_canvas is the S3 URL, unlike the other
    # avatars which use the CloudFront URL — confirm this is intended.
    GIA_BUSINESS = {
        "avatar_code": "gia.business",
        "avatar_gender": "female",
        "avatar_url": "https://elai-avatars.s3.us-east-2.amazonaws.com/common/gia/business/gia_business.png",
        "avatar_canvas": "https://elai-avatars.s3.us-east-2.amazonaws.com/common/gia/business/gia_business.png",
        "voice_id": "EXAVITQu4vr4xnSDxMaL",
        "voice_provider": "elevenlabs"
    }

    # Works
    VADIM_BUSINESS = {
        "avatar_code": "vadim.business",
        "avatar_gender": "male",
        "avatar_url": "https://elai-avatars.s3.us-east-2.amazonaws.com/common/vadim/business/vadim_business.png",
        "avatar_canvas": "https://d3u63mhbhkevz8.cloudfront.net/common/vadim/business/vadim_business.png",
        "voice_id": "flq6f7yk4E4fJM5XTYuZ",
        "voice_provider": "elevenlabs"
    }

    ORHAN_BUSINESS = {
        "avatar_code": "orhan.business",
        "avatar_gender": "male",
        "avatar_url": "https://elai-avatars.s3.us-east-2.amazonaws.com/common/orhan/business/orhan.png",
        "avatar_canvas": "https://d3u63mhbhkevz8.cloudfront.net/common/orhan/business/orhan.png",
        "voice_id": "en-US-AndrewMultilingualNeural",
        "voice_provider": "azure"
    }

    FLORA_BUSINESS = {
        "avatar_code": "flora.business",
        "avatar_gender": "female",
        "avatar_url": "https://elai-avatars.s3.us-east-2.amazonaws.com/common/flora/business/flora_business.png",
        "avatar_canvas": "https://d3u63mhbhkevz8.cloudfront.net/common/flora/business/flora_business.png",
        "voice_id": "en-US-JaneNeural",
        "voice_provider": "azure"
    }

    SCARLETT_BUSINESS = {
        "avatar_code": "scarlett.business",
        "avatar_gender": "female",
        "avatar_url": "https://elai-avatars.s3.us-east-2.amazonaws.com/common/scarlett/business/scarlett_business.png",
        "avatar_canvas": "https://d3u63mhbhkevz8.cloudfront.net/common/scarlett/business/scarlett_business.png",
        "voice_id": "en-US-NancyNeural",
        "voice_provider": "azure"
    }

    PARKER_CASUAL = {
        "avatar_code": "parker.casual",
        "avatar_gender": "male",
        "avatar_url": "https://elai-avatars.s3.us-east-2.amazonaws.com/common/parker/casual/parker_casual.png",
        "avatar_canvas": "https://d3u63mhbhkevz8.cloudfront.net/common/parker/casual/parker_casual.png",
        "voice_id": "en-US-TonyNeural",
        "voice_provider": "azure"
    }

    ETHAN_BUSINESS = {
        "avatar_code": "ethan.business",
        "avatar_gender": "male",
        "avatar_url": "https://elai-avatars.s3.us-east-2.amazonaws.com/common/ethan/business/ethan_business_low.png",
        "avatar_canvas": "https://d3u63mhbhkevz8.cloudfront.net/common/ethan/business/ethan_business_low.png",
        "voice_id": "en-US-JasonNeural",
        "voice_provider": "azure"
    }
class FilePaths:
    """Path prefixes for local scratch files and Firebase storage.

    Local paths are relative working directories for downloaded media;
    the FIREBASE_* prefixes are object-key prefixes in Firebase storage.
    """

    # Local working directory for downloaded audio.
    AUDIO_FILES_PATH = 'download-audio/'
    # Firebase storage prefix for listening-section recordings.
    FIREBASE_LISTENING_AUDIO_FILES_PATH = 'listening_recordings/'
    # Local working directory for downloaded video.
    VIDEO_FILES_PATH = 'download-video/'
    # Firebase storage prefix for speaking-task videos.
    FIREBASE_SPEAKING_VIDEO_FILES_PATH = 'speaking_videos/'
class TemperatureSettings:
    """LLM sampling temperatures per use case.

    Low temperatures for deterministic grading/tips, a higher one for
    creative question generation.
    """

    GRADING_TEMPERATURE = 0.1
    TIPS_TEMPERATURE = 0.2
    GEN_QUESTION_TEMPERATURE = 0.7
class GPTModels:
    """OpenAI chat/completion model identifier strings."""

    GPT_3_5_TURBO = "gpt-3.5-turbo"
    GPT_4_TURBO = "gpt-4-turbo"
    GPT_4_O = "gpt-4o"
    GPT_3_5_TURBO_16K = "gpt-3.5-turbo-16k"
    GPT_3_5_TURBO_INSTRUCT = "gpt-3.5-turbo-instruct"
    GPT_4_PREVIEW = "gpt-4-turbo-preview"
class FieldsAndExercises:
    """Field names and exercise-type pools for content generation.

    The *_FIELDS lists name the keys expected in generated payloads;
    the *_EXERCISE_TYPES lists are the pools of exercise formats a
    section may draw from; the TOTAL_* constants fix how many
    exercises each passage/section contains.
    """

    GRADING_FIELDS = ['comment', 'overall', 'task_response']
    GEN_FIELDS = ['topic']
    GEN_TEXT_FIELDS = ['title']
    LISTENING_GEN_FIELDS = ['transcript', 'exercise']
    READING_EXERCISE_TYPES = ['fillBlanks', 'writeBlanks', 'trueFalse', 'paragraphMatch']
    READING_3_EXERCISE_TYPES = ['fillBlanks', 'writeBlanks', 'trueFalse', 'paragraphMatch', 'ideaMatch']

    LISTENING_EXERCISE_TYPES = ['multipleChoice', 'writeBlanksQuestions', 'writeBlanksFill', 'writeBlanksForm']
    # NOTE(review): the repeated entries below look like deliberate
    # selection weights (fill x2, form x4) — confirm before deduplicating.
    LISTENING_1_EXERCISE_TYPES = ['multipleChoice', 'writeBlanksQuestions', 'writeBlanksFill', 'writeBlanksFill',
                                  'writeBlanksForm', 'writeBlanksForm', 'writeBlanksForm', 'writeBlanksForm']
    LISTENING_2_EXERCISE_TYPES = ['multipleChoice', 'writeBlanksQuestions']
    LISTENING_3_EXERCISE_TYPES = ['multipleChoice3Options', 'writeBlanksQuestions']
    LISTENING_4_EXERCISE_TYPES = ['multipleChoice', 'writeBlanksQuestions', 'writeBlanksFill', 'writeBlanksForm']

    TOTAL_READING_PASSAGE_1_EXERCISES = 13
    TOTAL_READING_PASSAGE_2_EXERCISES = 13
    TOTAL_READING_PASSAGE_3_EXERCISES = 14

    TOTAL_LISTENING_SECTION_1_EXERCISES = 10
    TOTAL_LISTENING_SECTION_2_EXERCISES = 10
    TOTAL_LISTENING_SECTION_3_EXERCISES = 10
    TOTAL_LISTENING_SECTION_4_EXERCISES = 10
class MinTimers:
    """Default minimum timer values per skill.

    NOTE(review): units are not stated in SOURCE — presumably minutes
    for listening/writing; confirm the speaking value's unit (14).
    """

    LISTENING_MIN_TIMER_DEFAULT = 30
    WRITING_MIN_TIMER_DEFAULT = 60
    SPEAKING_MIN_TIMER_DEFAULT = 14
class Voices:
    """Amazon Polly voice catalog (standard-capable voices).

    Each entry mirrors Polly's DescribeVoices response shape
    (Gender / Id / LanguageCode / LanguageName / Name /
    SupportedEngines). ALL_VOICES aggregates every regional list,
    and the MALE_/FEMALE_ splits are derived from it by Gender.
    """

    EN_US_VOICES = [
        {'Gender': 'Female', 'Id': 'Salli', 'LanguageCode': 'en-US', 'LanguageName': 'US English', 'Name': 'Salli',
         'SupportedEngines': ['neural', 'standard']},
        {'Gender': 'Male', 'Id': 'Matthew', 'LanguageCode': 'en-US', 'LanguageName': 'US English', 'Name': 'Matthew',
         'SupportedEngines': ['neural', 'standard']},
        {'Gender': 'Female', 'Id': 'Kimberly', 'LanguageCode': 'en-US', 'LanguageName': 'US English', 'Name': 'Kimberly',
         'SupportedEngines': ['neural', 'standard']},
        {'Gender': 'Female', 'Id': 'Kendra', 'LanguageCode': 'en-US', 'LanguageName': 'US English', 'Name': 'Kendra',
         'SupportedEngines': ['neural', 'standard']},
        {'Gender': 'Male', 'Id': 'Justin', 'LanguageCode': 'en-US', 'LanguageName': 'US English', 'Name': 'Justin',
         'SupportedEngines': ['neural', 'standard']},
        {'Gender': 'Male', 'Id': 'Joey', 'LanguageCode': 'en-US', 'LanguageName': 'US English', 'Name': 'Joey',
         'SupportedEngines': ['neural', 'standard']},
        {'Gender': 'Female', 'Id': 'Joanna', 'LanguageCode': 'en-US', 'LanguageName': 'US English', 'Name': 'Joanna',
         'SupportedEngines': ['neural', 'standard']},
        {'Gender': 'Female', 'Id': 'Ivy', 'LanguageCode': 'en-US', 'LanguageName': 'US English', 'Name': 'Ivy',
         'SupportedEngines': ['neural', 'standard']}]
    EN_GB_VOICES = [
        {'Gender': 'Female', 'Id': 'Emma', 'LanguageCode': 'en-GB', 'LanguageName': 'British English', 'Name': 'Emma',
         'SupportedEngines': ['neural', 'standard']},
        {'Gender': 'Male', 'Id': 'Brian', 'LanguageCode': 'en-GB', 'LanguageName': 'British English', 'Name': 'Brian',
         'SupportedEngines': ['neural', 'standard']},
        {'Gender': 'Female', 'Id': 'Amy', 'LanguageCode': 'en-GB', 'LanguageName': 'British English', 'Name': 'Amy',
         'SupportedEngines': ['neural', 'standard']}]
    EN_GB_WLS_VOICES = [
        {'Gender': 'Male', 'Id': 'Geraint', 'LanguageCode': 'en-GB-WLS', 'LanguageName': 'Welsh English', 'Name': 'Geraint',
         'SupportedEngines': ['standard']}]
    EN_AU_VOICES = [{'Gender': 'Male', 'Id': 'Russell', 'LanguageCode': 'en-AU', 'LanguageName': 'Australian English',
                     'Name': 'Russell', 'SupportedEngines': ['standard']},
                    {'Gender': 'Female', 'Id': 'Nicole', 'LanguageCode': 'en-AU', 'LanguageName': 'Australian English',
                     'Name': 'Nicole', 'SupportedEngines': ['standard']}]

    ALL_VOICES = EN_US_VOICES + EN_GB_VOICES + EN_GB_WLS_VOICES + EN_AU_VOICES

    MALE_VOICES = [item for item in ALL_VOICES if item.get('Gender') == 'Male']
    FEMALE_VOICES = [item for item in ALL_VOICES if item.get('Gender') == 'Female']
class NeuralVoices:
    """Amazon Polly neural-only voice catalog.

    Same entry shape as :class:`Voices`. ALL_NEURAL_VOICES aggregates
    every regional list, and the MALE_/FEMALE_ splits are derived
    from it by Gender.
    """

    NEURAL_EN_US_VOICES = [
        {'Gender': 'Female', 'Id': 'Danielle', 'LanguageCode': 'en-US', 'LanguageName': 'US English', 'Name': 'Danielle',
         'SupportedEngines': ['neural']},
        {'Gender': 'Male', 'Id': 'Gregory', 'LanguageCode': 'en-US', 'LanguageName': 'US English', 'Name': 'Gregory',
         'SupportedEngines': ['neural']},
        {'Gender': 'Male', 'Id': 'Kevin', 'LanguageCode': 'en-US', 'LanguageName': 'US English', 'Name': 'Kevin',
         'SupportedEngines': ['neural']},
        {'Gender': 'Female', 'Id': 'Ruth', 'LanguageCode': 'en-US', 'LanguageName': 'US English', 'Name': 'Ruth',
         'SupportedEngines': ['neural']},
        {'Gender': 'Male', 'Id': 'Stephen', 'LanguageCode': 'en-US', 'LanguageName': 'US English', 'Name': 'Stephen',
         'SupportedEngines': ['neural']}]
    NEURAL_EN_GB_VOICES = [
        {'Gender': 'Male', 'Id': 'Arthur', 'LanguageCode': 'en-GB', 'LanguageName': 'British English', 'Name': 'Arthur',
         'SupportedEngines': ['neural']}]
    NEURAL_EN_AU_VOICES = [
        {'Gender': 'Female', 'Id': 'Olivia', 'LanguageCode': 'en-AU', 'LanguageName': 'Australian English',
         'Name': 'Olivia', 'SupportedEngines': ['neural']}]
    NEURAL_EN_ZA_VOICES = [
        {'Gender': 'Female', 'Id': 'Ayanda', 'LanguageCode': 'en-ZA', 'LanguageName': 'South African English',
         'Name': 'Ayanda', 'SupportedEngines': ['neural']}]
    NEURAL_EN_NZ_VOICES = [
        {'Gender': 'Female', 'Id': 'Aria', 'LanguageCode': 'en-NZ', 'LanguageName': 'New Zealand English', 'Name': 'Aria',
         'SupportedEngines': ['neural']}]
    NEURAL_EN_IN_VOICES = [
        {'Gender': 'Female', 'Id': 'Kajal', 'LanguageCode': 'en-IN', 'LanguageName': 'Indian English', 'Name': 'Kajal',
         'SupportedEngines': ['neural']}]
    NEURAL_EN_IE_VOICES = [
        {'Gender': 'Female', 'Id': 'Niamh', 'LanguageCode': 'en-IE', 'LanguageName': 'Irish English', 'Name': 'Niamh',
         'SupportedEngines': ['neural']}]

    # FIX: NEURAL_EN_IN_VOICES was defined but omitted from the "ALL"
    # aggregate while every other regional list was included — this
    # looked like an accidental omission, so it is now included.
    ALL_NEURAL_VOICES = (NEURAL_EN_US_VOICES + NEURAL_EN_GB_VOICES + NEURAL_EN_AU_VOICES + NEURAL_EN_ZA_VOICES
                         + NEURAL_EN_NZ_VOICES + NEURAL_EN_IN_VOICES + NEURAL_EN_IE_VOICES)

    MALE_NEURAL_VOICES = [item for item in ALL_NEURAL_VOICES if item.get('Gender') == 'Male']
    FEMALE_NEURAL_VOICES = [item for item in ALL_NEURAL_VOICES if item.get('Gender') == 'Female']
class EducationalContent:
|
||||||
|
DIFFICULTIES = ["easy", "medium", "hard"]
|
||||||
|
|
||||||
|
MTI_TOPICS = [
|
||||||
|
"Education",
|
||||||
|
"Technology",
|
||||||
|
"Environment",
|
||||||
|
"Health and Fitness",
|
||||||
|
"Engineering",
|
||||||
|
"Work and Careers",
|
||||||
|
"Travel and Tourism",
|
||||||
|
"Culture and Traditions",
|
||||||
|
"Social Issues",
|
||||||
|
"Arts and Entertainment",
|
||||||
|
"Climate Change",
|
||||||
|
"Social Media",
|
||||||
|
"Sustainable Development",
|
||||||
|
"Health Care",
|
||||||
|
"Immigration",
|
||||||
|
"Artificial Intelligence",
|
||||||
|
"Consumerism",
|
||||||
|
"Online Shopping",
|
||||||
|
"Energy",
|
||||||
|
"Oil and Gas",
|
||||||
|
"Poverty and Inequality",
|
||||||
|
"Cultural Diversity",
|
||||||
|
"Democracy and Governance",
|
||||||
|
"Mental Health",
|
||||||
|
"Ethics and Morality",
|
||||||
|
"Population Growth",
|
||||||
|
"Science and Innovation",
|
||||||
|
"Poverty Alleviation",
|
||||||
|
"Cybersecurity and Privacy",
|
||||||
|
"Human Rights",
|
||||||
|
"Food and Agriculture",
|
||||||
|
"Cyberbullying and Online Safety",
|
||||||
|
"Linguistic Diversity",
|
||||||
|
"Urbanization",
|
||||||
|
"Artificial Intelligence in Education",
|
||||||
|
"Youth Empowerment",
|
||||||
|
"Disaster Management",
|
||||||
|
"Mental Health Stigma",
|
||||||
|
"Internet Censorship",
|
||||||
|
"Sustainable Fashion",
|
||||||
|
"Indigenous Rights",
|
||||||
|
"Water Scarcity",
|
||||||
|
"Social Entrepreneurship",
|
||||||
|
"Privacy in the Digital Age",
|
||||||
|
"Sustainable Transportation",
|
||||||
|
"Gender Equality",
|
||||||
|
"Automation and Job Displacement",
|
||||||
|
"Digital Divide",
|
||||||
|
"Education Inequality"
|
||||||
|
]
|
||||||
|
TOPICS = [
|
||||||
|
"Art and Creativity",
|
||||||
|
"History of Ancient Civilizations",
|
||||||
|
"Environmental Conservation",
|
||||||
|
"Space Exploration",
|
||||||
|
"Artificial Intelligence",
|
||||||
|
"Climate Change",
|
||||||
|
"The Human Brain",
|
||||||
|
"Renewable Energy",
|
||||||
|
"Cultural Diversity",
|
||||||
|
"Modern Technology Trends",
|
||||||
|
"Sustainable Agriculture",
|
||||||
|
"Natural Disasters",
|
||||||
|
"Cybersecurity",
|
||||||
|
"Philosophy of Ethics",
|
||||||
|
"Robotics",
|
||||||
|
"Health and Wellness",
|
||||||
|
"Literature and Classics",
|
||||||
|
"World Geography",
|
||||||
|
"Social Media Impact",
|
||||||
|
"Food Sustainability",
|
||||||
|
"Economics and Markets",
|
||||||
|
"Human Evolution",
|
||||||
|
"Political Systems",
|
||||||
|
"Mental Health Awareness",
|
||||||
|
"Quantum Physics",
|
||||||
|
"Biodiversity",
|
||||||
|
"Education Reform",
|
||||||
|
"Animal Rights",
|
||||||
|
"The Industrial Revolution",
|
||||||
|
"Future of Work",
|
||||||
|
"Film and Cinema",
|
||||||
|
"Genetic Engineering",
|
||||||
|
"Climate Policy",
|
||||||
|
"Space Travel",
|
||||||
|
"Renewable Energy Sources",
|
||||||
|
"Cultural Heritage Preservation",
|
||||||
|
"Modern Art Movements",
|
||||||
|
"Sustainable Transportation",
|
||||||
|
"The History of Medicine",
|
||||||
|
"Artificial Neural Networks",
|
||||||
|
"Climate Adaptation",
|
||||||
|
"Philosophy of Existence",
|
||||||
|
"Augmented Reality",
|
||||||
|
"Yoga and Meditation",
|
||||||
|
"Literary Genres",
|
||||||
|
"World Oceans",
|
||||||
|
"Social Networking",
|
||||||
|
"Sustainable Fashion",
|
||||||
|
"Prehistoric Era",
|
||||||
|
"Democracy and Governance",
|
||||||
|
"Postcolonial Literature",
|
||||||
|
"Geopolitics",
|
||||||
|
"Psychology and Behavior",
|
||||||
|
"Nanotechnology",
|
||||||
|
"Endangered Species",
|
||||||
|
"Education Technology",
|
||||||
|
"Renaissance Art",
|
||||||
|
"Renewable Energy Policy",
|
||||||
|
"Modern Architecture",
|
||||||
|
"Climate Resilience",
|
||||||
|
"Artificial Life",
|
||||||
|
"Fitness and Nutrition",
|
||||||
|
"Classic Literature Adaptations",
|
||||||
|
"Ethical Dilemmas",
|
||||||
|
"Internet of Things (IoT)",
|
||||||
|
"Meditation Practices",
|
||||||
|
"Literary Symbolism",
|
||||||
|
"Marine Conservation",
|
||||||
|
"Sustainable Tourism",
|
||||||
|
"Ancient Philosophy",
|
||||||
|
"Cold War Era",
|
||||||
|
"Behavioral Economics",
|
||||||
|
"Space Colonization",
|
||||||
|
"Clean Energy Initiatives",
|
||||||
|
"Cultural Exchange",
|
||||||
|
"Modern Sculpture",
|
||||||
|
"Climate Mitigation",
|
||||||
|
"Mindfulness",
|
||||||
|
"Literary Criticism",
|
||||||
|
"Wildlife Conservation",
|
||||||
|
"Renewable Energy Innovations",
|
||||||
|
"History of Mathematics",
|
||||||
|
"Human-Computer Interaction",
|
||||||
|
"Global Health",
|
||||||
|
"Cultural Appropriation",
|
||||||
|
"Traditional cuisine and culinary arts",
|
||||||
|
"Local music and dance traditions",
|
||||||
|
"History of the region and historical landmarks",
|
||||||
|
"Traditional crafts and artisanal skills",
|
||||||
|
"Wildlife and conservation efforts",
|
||||||
|
"Local sports and athletic competitions",
|
||||||
|
"Fashion trends and clothing styles",
|
||||||
|
"Education systems and advancements",
|
||||||
|
"Healthcare services and medical innovations",
|
||||||
|
"Family values and social dynamics",
|
||||||
|
"Travel destinations and tourist attractions",
|
||||||
|
"Environmental sustainability projects",
|
||||||
|
"Technological developments and innovations",
|
||||||
|
"Entrepreneurship and business ventures",
|
||||||
|
"Youth empowerment initiatives",
|
||||||
|
"Art exhibitions and cultural events",
|
||||||
|
"Philanthropy and community development projects"
|
||||||
|
]
|
||||||
|
|
||||||
|
TWO_PEOPLE_SCENARIOS = [
|
||||||
|
"Booking a table at a restaurant",
|
||||||
|
"Making a doctor's appointment",
|
||||||
|
"Asking for directions to a tourist attraction",
|
||||||
|
"Inquiring about public transportation options",
|
||||||
|
"Discussing weekend plans with a friend",
|
||||||
|
"Ordering food at a café",
|
||||||
|
"Renting a bicycle for a day",
|
||||||
|
"Arranging a meeting with a colleague",
|
||||||
|
"Talking to a real estate agent about renting an apartment",
|
||||||
|
"Discussing travel plans for an upcoming vacation",
|
||||||
|
"Checking the availability of a hotel room",
|
||||||
|
"Talking to a car rental service",
|
||||||
|
"Asking for recommendations at a library",
|
||||||
|
"Inquiring about opening hours at a museum",
|
||||||
|
"Discussing the weather forecast",
|
||||||
|
"Shopping for groceries",
|
||||||
|
"Renting a movie from a video store",
|
||||||
|
"Booking a flight ticket",
|
||||||
|
"Discussing a school assignment with a classmate",
|
||||||
|
"Making a reservation for a spa appointment",
|
||||||
|
"Talking to a customer service representative about a product issue",
|
||||||
|
"Discussing household chores with a family member",
|
||||||
|
"Planning a surprise party for a friend",
|
||||||
|
"Talking to a coworker about a project deadline",
|
||||||
|
"Inquiring about a gym membership",
|
||||||
|
"Discussing the menu options at a fast-food restaurant",
|
||||||
|
"Talking to a neighbor about a community event",
|
||||||
|
"Asking for help with computer problems",
|
||||||
|
"Discussing a recent sports game with a sports enthusiast",
|
||||||
|
"Talking to a pet store employee about buying a pet",
|
||||||
|
"Asking for information about a local farmer's market",
|
||||||
|
"Discussing the details of a home renovation project",
|
||||||
|
"Talking to a coworker about office supplies",
|
||||||
|
"Making plans for a family picnic",
|
||||||
|
"Inquiring about admission requirements at a university",
|
||||||
|
"Discussing the features of a new smartphone with a salesperson",
|
||||||
|
"Talking to a mechanic about car repairs",
|
||||||
|
"Making arrangements for a child's birthday party",
|
||||||
|
"Discussing a new diet plan with a nutritionist",
|
||||||
|
"Asking for information about a music concert",
|
||||||
|
"Talking to a hairdresser about getting a haircut",
|
||||||
|
"Inquiring about a language course at a language school",
|
||||||
|
"Discussing plans for a weekend camping trip",
|
||||||
|
"Talking to a bank teller about opening a new account",
|
||||||
|
"Ordering a drink at a coffee shop",
|
||||||
|
"Discussing a new book with a book club member",
|
||||||
|
"Talking to a librarian about library services",
|
||||||
|
"Asking for advice on finding a job",
|
||||||
|
"Discussing plans for a garden makeover with a landscaper",
|
||||||
|
"Talking to a travel agent about a cruise vacation",
|
||||||
|
"Inquiring about a fitness class at a gym",
|
||||||
|
"Ordering flowers for a special occasion",
|
||||||
|
"Discussing a new exercise routine with a personal trainer",
|
||||||
|
"Talking to a teacher about a child's progress in school",
|
||||||
|
"Asking for information about a local art exhibition",
|
||||||
|
"Discussing a home improvement project with a contractor",
|
||||||
|
"Talking to a babysitter about childcare arrangements",
|
||||||
|
"Making arrangements for a car service appointment",
|
||||||
|
"Inquiring about a photography workshop at a studio",
|
||||||
|
"Discussing plans for a family reunion with a relative",
|
||||||
|
"Talking to a tech support representative about computer issues",
|
||||||
|
"Asking for recommendations on pet grooming services",
|
||||||
|
"Discussing weekend plans with a significant other",
|
||||||
|
"Talking to a counselor about personal issues",
|
||||||
|
"Inquiring about a music lesson with a music teacher",
|
||||||
|
"Ordering a pizza for delivery",
|
||||||
|
"Making a reservation for a taxi",
|
||||||
|
"Discussing a new recipe with a chef",
|
||||||
|
"Talking to a fitness trainer about weight loss goals",
|
||||||
|
"Inquiring about a dance class at a dance studio",
|
||||||
|
"Ordering a meal at a food truck",
|
||||||
|
"Discussing plans for a weekend getaway with a partner",
|
||||||
|
"Talking to a florist about wedding flower arrangements",
|
||||||
|
"Asking for advice on home decorating",
|
||||||
|
"Discussing plans for a charity fundraiser event",
|
||||||
|
"Talking to a pet sitter about taking care of pets",
|
||||||
|
"Making arrangements for a spa day with a friend",
|
||||||
|
"Asking for recommendations on home improvement stores",
|
||||||
|
"Discussing weekend plans with a travel enthusiast",
|
||||||
|
"Talking to a car mechanic about car maintenance",
|
||||||
|
"Inquiring about a cooking class at a culinary school",
|
||||||
|
"Ordering a sandwich at a deli",
|
||||||
|
"Discussing plans for a family holiday party",
|
||||||
|
"Talking to a personal assistant about organizing tasks",
|
||||||
|
"Asking for information about a local theater production",
|
||||||
|
"Discussing a new DIY project with a home improvement expert",
|
||||||
|
"Talking to a wine expert about wine pairing",
|
||||||
|
"Making arrangements for a pet adoption",
|
||||||
|
"Asking for advice on planning a wedding"
|
||||||
|
]
|
||||||
|
|
||||||
|
SOCIAL_MONOLOGUE_CONTEXTS = [
|
||||||
|
"A guided tour of a historical museum",
|
||||||
|
"An introduction to a new city for tourists",
|
||||||
|
"An orientation session for new university students",
|
||||||
|
"A safety briefing for airline passengers",
|
||||||
|
"An explanation of the process of recycling",
|
||||||
|
"A lecture on the benefits of a healthy diet",
|
||||||
|
"A talk on the importance of time management",
|
||||||
|
"A monologue about wildlife conservation",
|
||||||
|
"An overview of local public transportation options",
|
||||||
|
"A presentation on the history of cinema",
|
||||||
|
"An introduction to the art of photography",
|
||||||
|
"A discussion about the effects of climate change",
|
||||||
|
"An overview of different types of cuisine",
|
||||||
|
"A lecture on the principles of financial planning",
|
||||||
|
"A monologue about sustainable energy sources",
|
||||||
|
"An explanation of the process of online shopping",
|
||||||
|
"A guided tour of a botanical garden",
|
||||||
|
"An introduction to a local wildlife sanctuary",
|
||||||
|
"A safety briefing for hikers in a national park",
|
||||||
|
"A talk on the benefits of physical exercise",
|
||||||
|
"A lecture on the principles of effective communication",
|
||||||
|
"A monologue about the impact of social media",
|
||||||
|
"An overview of the history of a famous landmark",
|
||||||
|
"An introduction to the world of fashion design",
|
||||||
|
"A discussion about the challenges of global poverty",
|
||||||
|
"An explanation of the process of organic farming",
|
||||||
|
"A presentation on the history of space exploration",
|
||||||
|
"An overview of traditional music from different cultures",
|
||||||
|
"A lecture on the principles of effective leadership",
|
||||||
|
"A monologue about the influence of technology",
|
||||||
|
"A guided tour of a famous archaeological site",
|
||||||
|
"An introduction to a local wildlife rehabilitation center",
|
||||||
|
"A safety briefing for visitors to a science museum",
|
||||||
|
"A talk on the benefits of learning a new language",
|
||||||
|
"A lecture on the principles of architectural design",
|
||||||
|
"A monologue about the impact of renewable energy",
|
||||||
|
"An explanation of the process of online banking",
|
||||||
|
"A presentation on the history of a famous art movement",
|
||||||
|
"An overview of traditional clothing from various regions",
|
||||||
|
"A lecture on the principles of sustainable agriculture",
|
||||||
|
"A discussion about the challenges of urban development",
|
||||||
|
"A monologue about the influence of social norms",
|
||||||
|
"A guided tour of a historical battlefield",
|
||||||
|
"An introduction to a local animal shelter",
|
||||||
|
"A safety briefing for participants in a charity run",
|
||||||
|
"A talk on the benefits of community involvement",
|
||||||
|
"A lecture on the principles of sustainable tourism",
|
||||||
|
"A monologue about the impact of alternative medicine",
|
||||||
|
"An explanation of the process of wildlife tracking",
|
||||||
|
"A presentation on the history of a famous inventor",
|
||||||
|
"An overview of traditional dance forms from different cultures",
|
||||||
|
"A lecture on the principles of ethical business practices",
|
||||||
|
"A discussion about the challenges of healthcare access",
|
||||||
|
"A monologue about the influence of cultural traditions",
|
||||||
|
"A guided tour of a famous lighthouse",
|
||||||
|
"An introduction to a local astronomy observatory",
|
||||||
|
"A safety briefing for participants in a team-building event",
|
||||||
|
"A talk on the benefits of volunteering",
|
||||||
|
"A lecture on the principles of wildlife protection",
|
||||||
|
"A monologue about the impact of space exploration",
|
||||||
|
"An explanation of the process of wildlife photography",
|
||||||
|
"A presentation on the history of a famous musician",
|
||||||
|
"An overview of traditional art forms from different cultures",
|
||||||
|
"A lecture on the principles of effective education",
|
||||||
|
"A discussion about the challenges of sustainable development",
|
||||||
|
"A monologue about the influence of cultural diversity",
|
||||||
|
"A guided tour of a famous national park",
|
||||||
|
"An introduction to a local marine conservation project",
|
||||||
|
"A safety briefing for participants in a hot air balloon ride",
|
||||||
|
"A talk on the benefits of cultural exchange programs",
|
||||||
|
"A lecture on the principles of wildlife conservation",
|
||||||
|
"A monologue about the impact of technological advancements",
|
||||||
|
"An explanation of the process of wildlife rehabilitation",
|
||||||
|
"A presentation on the history of a famous explorer",
|
||||||
|
"A lecture on the principles of effective marketing",
|
||||||
|
"A discussion about the challenges of environmental sustainability",
|
||||||
|
"A monologue about the influence of social entrepreneurship",
|
||||||
|
"A guided tour of a famous historical estate",
|
||||||
|
"An introduction to a local marine life research center",
|
||||||
|
"A safety briefing for participants in a zip-lining adventure",
|
||||||
|
"A talk on the benefits of cultural preservation",
|
||||||
|
"A lecture on the principles of wildlife ecology",
|
||||||
|
"A monologue about the impact of space technology",
|
||||||
|
"An explanation of the process of wildlife conservation",
|
||||||
|
"A presentation on the history of a famous scientist",
|
||||||
|
"An overview of traditional crafts and artisans from different cultures",
|
||||||
|
"A lecture on the principles of effective intercultural communication"
|
||||||
|
]
|
||||||
|
|
||||||
|
FOUR_PEOPLE_SCENARIOS = [
|
||||||
|
"A university lecture on history",
|
||||||
|
"A physics class discussing Newton's laws",
|
||||||
|
"A medical school seminar on anatomy",
|
||||||
|
"A training session on computer programming",
|
||||||
|
"A business school lecture on marketing strategies",
|
||||||
|
"A chemistry lab experiment and discussion",
|
||||||
|
"A language class practicing conversational skills",
|
||||||
|
"A workshop on creative writing techniques",
|
||||||
|
"A high school math lesson on calculus",
|
||||||
|
"A training program for customer service representatives",
|
||||||
|
"A lecture on environmental science and sustainability",
|
||||||
|
"A psychology class exploring human behavior",
|
||||||
|
"A music theory class analyzing compositions",
|
||||||
|
"A nursing school simulation for patient care",
|
||||||
|
"A computer science class on algorithms",
|
||||||
|
"A workshop on graphic design principles",
|
||||||
|
"A law school lecture on constitutional law",
|
||||||
|
"A geology class studying rock formations",
|
||||||
|
"A vocational training program for electricians",
|
||||||
|
"A history seminar focusing on ancient civilizations",
|
||||||
|
"A biology class dissecting specimens",
|
||||||
|
"A financial literacy course for adults",
|
||||||
|
"A literature class discussing classic novels",
|
||||||
|
"A training session for emergency response teams",
|
||||||
|
"A sociology lecture on social inequality",
|
||||||
|
"An art class exploring different painting techniques",
|
||||||
|
"A medical school seminar on diagnosis",
|
||||||
|
"A programming bootcamp teaching web development",
|
||||||
|
"An economics class analyzing market trends",
|
||||||
|
"A chemistry lab experiment on chemical reactions",
|
||||||
|
"A language class practicing pronunciation",
|
||||||
|
"A workshop on public speaking skills",
|
||||||
|
"A high school physics lesson on electromagnetism",
|
||||||
|
"A training program for IT professionals",
|
||||||
|
"A lecture on climate change and its effects",
|
||||||
|
"A psychology class studying cognitive psychology",
|
||||||
|
"A music class composing original songs",
|
||||||
|
"A nursing school simulation for patient assessment",
|
||||||
|
"A computer science class on data structures",
|
||||||
|
"A workshop on 3D modeling and animation",
|
||||||
|
"A law school lecture on contract law",
|
||||||
|
"A geography class examining world maps",
|
||||||
|
"A vocational training program for plumbers",
|
||||||
|
"A history seminar discussing revolutions",
|
||||||
|
"A biology class exploring genetics",
|
||||||
|
"A financial literacy course for teens",
|
||||||
|
"A literature class analyzing poetry",
|
||||||
|
"A training session for public speaking coaches",
|
||||||
|
"A sociology lecture on cultural diversity",
|
||||||
|
"An art class creating sculptures",
|
||||||
|
"A medical school seminar on surgical techniques",
|
||||||
|
"A programming bootcamp teaching app development",
|
||||||
|
"An economics class on global trade policies",
|
||||||
|
"A chemistry lab experiment on chemical bonding",
|
||||||
|
"A language class discussing idiomatic expressions",
|
||||||
|
"A workshop on conflict resolution",
|
||||||
|
"A high school biology lesson on evolution",
|
||||||
|
"A training program for project managers",
|
||||||
|
"A lecture on renewable energy sources",
|
||||||
|
"A psychology class on abnormal psychology",
|
||||||
|
"A music class rehearsing for a performance",
|
||||||
|
"A nursing school simulation for emergency response",
|
||||||
|
"A computer science class on cybersecurity",
|
||||||
|
"A workshop on digital marketing strategies",
|
||||||
|
"A law school lecture on intellectual property",
|
||||||
|
"A geology class analyzing seismic activity",
|
||||||
|
"A vocational training program for carpenters",
|
||||||
|
"A history seminar on the Renaissance",
|
||||||
|
"A chemistry class synthesizing compounds",
|
||||||
|
"A financial literacy course for seniors",
|
||||||
|
"A literature class interpreting Shakespearean plays",
|
||||||
|
"A training session for negotiation skills",
|
||||||
|
"A sociology lecture on urbanization",
|
||||||
|
"An art class creating digital art",
|
||||||
|
"A medical school seminar on patient communication",
|
||||||
|
"A programming bootcamp teaching mobile app development",
|
||||||
|
"An economics class on fiscal policy",
|
||||||
|
"A physics lab experiment on electromagnetism",
|
||||||
|
"A language class on cultural immersion",
|
||||||
|
"A workshop on time management",
|
||||||
|
"A high school chemistry lesson on stoichiometry",
|
||||||
|
"A training program for HR professionals",
|
||||||
|
"A lecture on space exploration and astronomy",
|
||||||
|
"A psychology class on human development",
|
||||||
|
"A music class practicing for a recital",
|
||||||
|
"A nursing school simulation for triage",
|
||||||
|
"A computer science class on web development frameworks",
|
||||||
|
"A workshop on team-building exercises",
|
||||||
|
"A law school lecture on criminal law",
|
||||||
|
"A geography class studying world cultures",
|
||||||
|
"A vocational training program for HVAC technicians",
|
||||||
|
"A history seminar on ancient civilizations",
|
||||||
|
"A biology class examining ecosystems",
|
||||||
|
"A financial literacy course for entrepreneurs",
|
||||||
|
"A literature class analyzing modern literature",
|
||||||
|
"A training session for leadership skills",
|
||||||
|
"A sociology lecture on gender studies",
|
||||||
|
"An art class exploring multimedia art",
|
||||||
|
"A medical school seminar on patient diagnosis",
|
||||||
|
"A programming bootcamp teaching software architecture"
|
||||||
|
]
|
||||||
|
|
||||||
|
# Pool of 100 academic subject names used when generating exercise content.
# NOTE(review): "Oceanography" appears twice; the duplicate is kept to
# preserve the original selection weighting — confirm whether it is intentional.
ACADEMIC_SUBJECTS = [
    "Astrophysics", "Microbiology", "Political Science", "Environmental Science",
    "Literature", "Biochemistry", "Sociology", "Art History",
    "Geology", "Economics", "Psychology", "History of Architecture",
    "Linguistics", "Neurobiology", "Anthropology", "Quantum Mechanics",
    "Urban Planning", "Philosophy", "Marine Biology", "International Relations",
    "Medieval History", "Geophysics", "Finance", "Educational Psychology",
    "Graphic Design", "Paleontology", "Macroeconomics", "Cognitive Psychology",
    "Renaissance Art", "Archaeology", "Microeconomics", "Social Psychology",
    "Contemporary Art", "Meteorology", "Political Philosophy", "Space Exploration",
    "Cognitive Science", "Classical Music", "Oceanography", "Public Health",
    "Gender Studies", "Baroque Art", "Volcanology", "Business Ethics",
    "Music Composition", "Environmental Policy", "Media Studies", "Ancient History",
    "Seismology", "Marketing", "Human Development", "Modern Art",
    "Astronomy", "International Law", "Developmental Psychology", "Film Studies",
    "American History", "Soil Science", "Entrepreneurship", "Clinical Psychology",
    "Contemporary Dance", "Space Physics", "Political Economy", "Cognitive Neuroscience",
    "20th Century Literature", "Public Administration", "European History", "Atmospheric Science",
    "Supply Chain Management", "Social Work", "Japanese Literature", "Planetary Science",
    "Labor Economics", "Industrial-Organizational Psychology", "French Philosophy", "Biogeochemistry",
    "Strategic Management", "Educational Sociology", "Postmodern Literature", "Public Relations",
    "Middle Eastern History", "Oceanography", "International Development", "Human Resources Management",
    "Educational Leadership", "Russian Literature", "Quantum Chemistry", "Environmental Economics",
    "Environmental Psychology", "Ancient Philosophy", "Immunology", "Comparative Politics",
    "Child Development", "Fashion Design", "Geological Engineering", "Macroeconomic Policy",
    "Media Psychology", "Byzantine Art", "Ecology", "International Business",
]
|
||||||
152
app/configs/dependency_injection.py
Normal file
152
app/configs/dependency_injection.py
Normal file
@@ -0,0 +1,152 @@
|
|||||||
|
import json
|
||||||
|
import os
|
||||||
|
|
||||||
|
from dependency_injector import providers, containers
|
||||||
|
from firebase_admin import credentials
|
||||||
|
from motor.motor_asyncio import AsyncIOMotorClient
|
||||||
|
from openai import AsyncOpenAI
|
||||||
|
from httpx import AsyncClient as HTTPClient
|
||||||
|
from dotenv import load_dotenv
|
||||||
|
from sentence_transformers import SentenceTransformer
|
||||||
|
|
||||||
|
from app.repositories.impl import *
|
||||||
|
from app.services.impl import *
|
||||||
|
from app.controllers.impl import *
|
||||||
|
|
||||||
|
load_dotenv()
|
||||||
|
|
||||||
|
|
||||||
|
class DependencyInjector:
    """Builds and wires the application's dependency-injector container.

    Registration happens in layers — raw clients, third-party adapters,
    repositories, services, controllers — after which the container is
    wired across the whole ``app`` package.
    """

    def __init__(self, polly_client: any, http_client: HTTPClient, whisper_model: any):
        # NOTE(review): `any` here is the builtin function, not typing.Any —
        # harmless at runtime but probably not what was intended.
        self._container = containers.DynamicContainer()
        self._polly_client = polly_client
        self._http_client = http_client
        self._whisper_model = whisper_model

    def inject(self):
        """Register every provider layer, then wire the ``app`` package."""
        self._setup_clients()
        self._setup_third_parties()
        self._setup_repositories()
        self._setup_services()
        self._setup_controllers()
        self._container.wire(packages=["app"])

    def _setup_clients(self):
        # Externally-constructed SDK clients are exposed as Objects;
        # the OpenAI client is created lazily once, as a Singleton.
        self._container.openai_client = providers.Singleton(AsyncOpenAI)
        self._container.polly_client = providers.Object(self._polly_client)
        self._container.http_client = providers.Object(self._http_client)
        self._container.whisper_model = providers.Object(self._whisper_model)

    def _setup_third_parties(self):
        # Adapters over the raw clients (LLM, speech-to-text, text-to-speech).
        self._container.llm = providers.Factory(OpenAI, client=self._container.openai_client)
        self._container.stt = providers.Factory(OpenAIWhisper, model=self._container.whisper_model)
        self._container.tts = providers.Factory(AWSPolly, client=self._container.polly_client)

        # ELAI video-generation config is read once at injection time.
        with open('app/services/impl/third_parties/elai/elai_conf.json', 'r') as conf_file:
            elai_conf = json.load(conf_file)

        self._container.vid_gen = providers.Factory(
            ELAI, client=self._container.http_client, token=os.getenv("ELAI_TOKEN"), conf=elai_conf
        )
        self._container.ai_detector = providers.Factory(
            GPTZero, client=self._container.http_client, gpt_zero_key=os.getenv("GPT_ZERO_API_KEY")
        )

    def _setup_repositories(self):
        cred = credentials.Certificate(os.getenv("GOOGLE_APPLICATION_CREDENTIALS"))
        # NOTE(review): the access token is fetched once, at injection time —
        # confirm something refreshes it before it expires.
        firebase_token = cred.get_access_token().access_token

        self._container.document_store = providers.Object(
            AsyncIOMotorClient(os.getenv("MONGODB_URI"))[os.getenv("MONGODB_DB")]
        )

        self._container.firebase_instance = providers.Factory(
            FirebaseStorage,
            client=self._container.http_client, token=firebase_token, bucket=os.getenv("FIREBASE_BUCKET")
        )

    def _setup_services(self):
        self._container.listening_service = providers.Factory(
            ListeningService,
            llm=self._container.llm,
            stt=self._container.stt,
            tts=self._container.tts,
            file_storage=self._container.firebase_instance,
            document_store=self._container.document_store
        )

        self._container.reading_service = providers.Factory(ReadingService, llm=self._container.llm)

        self._container.speaking_service = providers.Factory(
            SpeakingService, llm=self._container.llm, vid_gen=self._container.vid_gen,
            file_storage=self._container.firebase_instance, document_store=self._container.document_store,
            stt=self._container.stt
        )

        self._container.writing_service = providers.Factory(
            WritingService, llm=self._container.llm, ai_detector=self._container.ai_detector
        )

        # Multiple-choice exercise variants are loaded once from disk.
        with open('app/services/impl/exam/level/mc_variants.json', 'r') as variants_file:
            mc_variants = json.load(variants_file)

        # The level service composes all four skill services.
        self._container.level_service = providers.Factory(
            LevelService, llm=self._container.llm, document_store=self._container.document_store,
            mc_variants=mc_variants, reading_service=self._container.reading_service,
            writing_service=self._container.writing_service, speaking_service=self._container.speaking_service,
            listening_service=self._container.listening_service
        )

        self._container.grade_service = providers.Factory(
            GradeService, llm=self._container.llm
        )

        # Sentence-embedding model backing the training knowledge base.
        embeddings = SentenceTransformer('all-MiniLM-L6-v2')

        self._container.training_kb = providers.Factory(
            TrainingContentKnowledgeBase, embeddings=embeddings
        )

        self._container.training_service = providers.Factory(
            TrainingService, llm=self._container.llm,
            document_store=self._container.document_store, training_kb=self._container.training_kb
        )

        self._container.user_service = providers.Factory(
            UserService, document_store=self._container.document_store
        )

    def _setup_controllers(self):
        self._container.grade_controller = providers.Factory(
            GradeController, grade_service=self._container.grade_service,
            speaking_service=self._container.speaking_service,
            writing_service=self._container.writing_service
        )

        self._container.user_controller = providers.Factory(
            UserController, user_service=self._container.user_service
        )

        self._container.training_controller = providers.Factory(
            TrainingController, training_service=self._container.training_service
        )

        self._container.level_controller = providers.Factory(
            LevelController, level_service=self._container.level_service
        )

        self._container.listening_controller = providers.Factory(
            ListeningController, listening_service=self._container.listening_service
        )

        self._container.reading_controller = providers.Factory(
            ReadingController, reading_service=self._container.reading_service
        )

        self._container.speaking_controller = providers.Factory(
            SpeakingController, speaking_service=self._container.speaking_service
        )

        self._container.writing_controller = providers.Factory(
            WritingController, writing_service=self._container.writing_service
        )
|
||||||
7
app/configs/logging/__init__.py
Normal file
7
app/configs/logging/__init__.py
Normal file
@@ -0,0 +1,7 @@
|
|||||||
|
from .filters import ErrorAndAboveFilter
|
||||||
|
from .queue_handler import QueueListenerHandler
|
||||||
|
|
||||||
|
__all__ = [
|
||||||
|
"ErrorAndAboveFilter",
|
||||||
|
"QueueListenerHandler"
|
||||||
|
]
|
||||||
6
app/configs/logging/filters.py
Normal file
6
app/configs/logging/filters.py
Normal file
@@ -0,0 +1,6 @@
|
|||||||
|
import logging
|
||||||
|
|
||||||
|
|
||||||
|
class ErrorAndAboveFilter(logging.Filter):
|
||||||
|
def filter(self, record: logging.LogRecord) -> bool | logging.LogRecord:
|
||||||
|
return record.levelno < logging.ERROR
|
||||||
105
app/configs/logging/formatters.py
Normal file
105
app/configs/logging/formatters.py
Normal file
@@ -0,0 +1,105 @@
|
|||||||
|
import datetime as dt
|
||||||
|
import json
|
||||||
|
import logging
|
||||||
|
|
||||||
|
# Attribute names that logging itself puts on every LogRecord. JSONFormatter
# uses this set to separate user-supplied `extra=` attributes (which pass
# through to the JSON payload) from the built-in record machinery.
# Kept alphabetized for easy scanning.
LOG_RECORD_BUILTIN_ATTRS = {
    "args",
    "asctime",
    "created",
    "exc_info",
    "exc_text",
    "filename",
    "funcName",
    "levelname",
    "levelno",
    "lineno",
    "message",
    "module",
    "msecs",
    "msg",
    "name",
    "pathname",
    "process",
    "processName",
    "relativeCreated",
    "stack_info",
    "taskName",
    "thread",
    "threadName",
}
|
||||||
|
|
||||||
|
"""
|
||||||
|
This isn't being used since the app will be run on gcloud run but this can be used for future apps.
|
||||||
|
If you want to test it:
|
||||||
|
|
||||||
|
formatters:
|
||||||
|
|
||||||
|
"json": {
|
||||||
|
"()": "json_formatter.JSONFormatter",
|
||||||
|
"fmt_keys": {
|
||||||
|
"level": "levelname",
|
||||||
|
"message": "message",
|
||||||
|
"timestamp": "timestamp",
|
||||||
|
"logger": "name",
|
||||||
|
"module": "module",
|
||||||
|
"function": "funcName",
|
||||||
|
"line": "lineno",
|
||||||
|
"thread_name": "threadName"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
handlers:
|
||||||
|
|
||||||
|
"file_json": {
|
||||||
|
"class": "logging.handlers.RotatingFileHandler",
|
||||||
|
"level": "DEBUG",
|
||||||
|
"formatter": "json",
|
||||||
|
"filename": "logs/log",
|
||||||
|
"maxBytes": 1000000,
|
||||||
|
"backupCount": 3
|
||||||
|
}
|
||||||
|
|
||||||
|
and add "cfg://handlers.file_json" to queue handler
|
||||||
|
"""
|
||||||
|
|
||||||
|
# From this video https://www.youtube.com/watch?v=9L77QExPmI0
|
||||||
|
# Src here: https://github.com/mCodingLLC/VideosSampleCode/blob/master/videos/135_modern_logging/mylogger.py
|
||||||
|
class JSONFormatter(logging.Formatter):
    """Formatter that serializes each record to a single JSON string.

    ``fmt_keys`` maps output keys to record attribute names; "message" and
    "timestamp" (plus exception/stack text when present) are always emitted,
    and any non-builtin attribute attached via ``extra=`` passes through.
    """

    def __init__(
        self,
        *,
        fmt_keys: dict[str, str] | None = None,
    ):
        super().__init__()
        self.fmt_keys = fmt_keys if fmt_keys is not None else {}

    def format(self, record: logging.LogRecord) -> str:
        # default=str keeps non-serializable extras from raising inside logging.
        return json.dumps(self._prepare_log_dict(record), default=str)

    def _prepare_log_dict(self, record: logging.LogRecord):
        # Fields emitted regardless of fmt_keys configuration.
        always_fields = {
            "message": record.getMessage(),
            "timestamp": dt.datetime.fromtimestamp(
                record.created, tz=dt.timezone.utc
            ).isoformat(),
        }
        if record.exc_info is not None:
            always_fields["exc_info"] = self.formatException(record.exc_info)

        if record.stack_info is not None:
            always_fields["stack_info"] = self.formatStack(record.stack_info)

        # Map configured keys; a fmt_keys value naming an always-field consumes
        # it (pop) so it is not duplicated by the update() below.
        message = {}
        for out_key, attr_name in self.fmt_keys.items():
            special = always_fields.pop(attr_name, None)
            message[out_key] = special if special is not None else getattr(record, attr_name)
        message.update(always_fields)

        # Pass through custom attributes added via `extra=`.
        for attr_name, value in record.__dict__.items():
            if attr_name not in LOG_RECORD_BUILTIN_ATTRS:
                message[attr_name] = value

        return message
|
||||||
53
app/configs/logging/logging_config.json
Normal file
53
app/configs/logging/logging_config.json
Normal file
@@ -0,0 +1,53 @@
|
|||||||
|
{
|
||||||
|
"version": 1,
|
||||||
|
"objects": {
|
||||||
|
"queue": {
|
||||||
|
"class": "queue.Queue",
|
||||||
|
"maxsize": 1000
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"disable_existing_loggers": false,
|
||||||
|
"formatters": {
|
||||||
|
"simple": {
|
||||||
|
"format": "[%(levelname)s] (%(module)s|L: %(lineno)d) %(asctime)s: %(message)s",
|
||||||
|
"datefmt": "%Y-%m-%dT%H:%M:%S%z"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"filters": {
|
||||||
|
"error_and_above": {
|
||||||
|
"()": "app.configs.logging.ErrorAndAboveFilter"
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"handlers": {
|
||||||
|
"console": {
|
||||||
|
"class": "logging.StreamHandler",
|
||||||
|
"level": "INFO",
|
||||||
|
"formatter": "simple",
|
||||||
|
"stream": "ext://sys.stdout",
|
||||||
|
"filters": ["error_and_above"]
|
||||||
|
},
|
||||||
|
"error": {
|
||||||
|
"class": "logging.StreamHandler",
|
||||||
|
"level": "ERROR",
|
||||||
|
"formatter": "simple",
|
||||||
|
"stream": "ext://sys.stderr"
|
||||||
|
},
|
||||||
|
"queue_handler": {
|
||||||
|
"class": "app.configs.logging.QueueListenerHandler",
|
||||||
|
"handlers": [
|
||||||
|
"cfg://handlers.console",
|
||||||
|
"cfg://handlers.error"
|
||||||
|
],
|
||||||
|
"queue": "cfg://objects.queue",
|
||||||
|
"respect_handler_level": true
|
||||||
|
}
|
||||||
|
},
|
||||||
|
"loggers": {
|
||||||
|
"root": {
|
||||||
|
"level": "DEBUG",
|
||||||
|
"handlers": [
|
||||||
|
"queue_handler"
|
||||||
|
]
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
61
app/configs/logging/queue_handler.py
Normal file
61
app/configs/logging/queue_handler.py
Normal file
@@ -0,0 +1,61 @@
|
|||||||
|
from logging.config import ConvertingList, ConvertingDict, valid_ident
|
||||||
|
from logging.handlers import QueueHandler, QueueListener
|
||||||
|
from queue import Queue
|
||||||
|
import atexit
|
||||||
|
|
||||||
|
|
||||||
|
class QueueHnadlerHelper:
    """Static helpers that resolve ``cfg://`` references produced by dictConfig.

    NOTE(review): the class name contains a typo ("Hnadler"); it is referenced
    by QueueListenerHandler in this module, so renaming must be coordinated —
    left as-is to keep this change self-contained.
    """

    @staticmethod
    def resolve_handlers(l):
        """Force evaluation of a ConvertingList of handlers; pass anything else through."""
        if not isinstance(l, ConvertingList):
            return l

        # Indexing a ConvertingList resolves each `cfg://` entry.
        return [l[idx] for idx in range(len(l))]

    @staticmethod
    def resolve_queue(q):
        """Instantiate the queue described by a ConvertingDict (memoized); pass anything else through."""
        if not isinstance(q, ConvertingDict):
            return q
        # Already built on a previous call — reuse the cached instance.
        if '__resolved_value__' in q:
            return q['__resolved_value__']

        cname = q.pop('class')
        klass = q.configurator.resolve(cname)
        props = q.pop('.', None)
        kwargs = {k: q[k] for k in q if valid_ident(k)}
        result = klass(**kwargs)
        # Optional post-construction attribute assignments ('.': {...}).
        if props:
            for name, value in props.items():
                setattr(result, name, value)

        # Memoize so repeated cfg:// lookups return the same queue object.
        q['__resolved_value__'] = result
        return result
|
||||||
|
|
||||||
|
|
||||||
|
# The guy from this video https://www.youtube.com/watch?v=9L77QExPmI0 is using logging features only available in 3.12
|
||||||
|
# This article had the class required to build the queue handler in 3.11
|
||||||
|
# https://rob-blackbourn.medium.com/how-to-use-python-logging-queuehandler-with-dictconfig-1e8b1284e27a
|
||||||
|
# The guy from this video https://www.youtube.com/watch?v=9L77QExPmI0 uses logging
# features only available in 3.12; this article had the class required to build the
# queue handler in 3.11:
# https://rob-blackbourn.medium.com/how-to-use-python-logging-queuehandler-with-dictconfig-1e8b1284e27a
class QueueListenerHandler(QueueHandler):
    """QueueHandler that owns and runs its own QueueListener.

    Records are enqueued by the handler and drained by the listener, which
    fans them out to the configured downstream handlers. With ``auto_run``
    the listener starts immediately and is stopped automatically at
    interpreter exit.
    """

    def __init__(self, handlers, respect_handler_level=False, auto_run=True, queue=None):
        # BUG FIX: the default used to be `queue=Queue(-1)` — a mutable default
        # evaluated once at class-definition time, so every handler constructed
        # without an explicit queue silently shared one Queue (and its listeners
        # would steal each other's records). Create a fresh queue per instance.
        if queue is None:
            queue = Queue(-1)
        queue = QueueHnadlerHelper.resolve_queue(queue)
        super().__init__(queue)
        handlers = QueueHnadlerHelper.resolve_handlers(handlers)
        self._listener = QueueListener(
            self.queue,
            *handlers,
            respect_handler_level=respect_handler_level)
        if auto_run:
            self.start()
            # Ensure queued records are flushed when the process exits.
            atexit.register(self.stop)

    def start(self):
        """Start the background listener thread."""
        self._listener.start()

    def stop(self):
        """Stop the listener, flushing any queued records."""
        self._listener.stop()

    def emit(self, record):
        # Kept as an explicit override point; behavior is inherited unchanged.
        return super().emit(record)
||||||
File diff suppressed because it is too large
Load Diff
0
app/controllers/__init__.py
Normal file
0
app/controllers/__init__.py
Normal file
19
app/controllers/abc/__init__.py
Normal file
19
app/controllers/abc/__init__.py
Normal file
@@ -0,0 +1,19 @@
|
|||||||
|
from .level import ILevelController
|
||||||
|
from .listening import IListeningController
|
||||||
|
from .reading import IReadingController
|
||||||
|
from .writing import IWritingController
|
||||||
|
from .speaking import ISpeakingController
|
||||||
|
from .grade import IGradeController
|
||||||
|
from .training import ITrainingController
|
||||||
|
from .user import IUserController
|
||||||
|
|
||||||
|
__all__ = [
|
||||||
|
"IListeningController",
|
||||||
|
"IReadingController",
|
||||||
|
"IWritingController",
|
||||||
|
"ISpeakingController",
|
||||||
|
"ILevelController",
|
||||||
|
"IGradeController",
|
||||||
|
"ITrainingController",
|
||||||
|
"IUserController",
|
||||||
|
]
|
||||||
22
app/controllers/abc/grade.py
Normal file
22
app/controllers/abc/grade.py
Normal file
@@ -0,0 +1,22 @@
|
|||||||
|
from abc import ABC, abstractmethod
|
||||||
|
from typing import Dict, List
|
||||||
|
|
||||||
|
|
||||||
|
class IGradeController(ABC):
    """Abstract contract for the grading HTTP controller."""

    @abstractmethod
    async def grade_writing_task(self, task: int, data):
        """Grade one writing task from its request payload."""
        ...

    @abstractmethod
    async def grade_speaking_task(self, task: int, answers: List[Dict]) -> Dict:
        """Grade one speaking task from a list of answer dicts."""
        ...

    @abstractmethod
    async def grade_short_answers(self, data: Dict):
        """Grade a batch of short answers."""
        ...

    @abstractmethod
    async def grading_summary(self, data: Dict):
        """Produce an overall grading summary across sections."""
        ...
|
||||||
|
|
||||||
27
app/controllers/abc/level.py
Normal file
27
app/controllers/abc/level.py
Normal file
@@ -0,0 +1,27 @@
|
|||||||
|
from abc import ABC, abstractmethod
|
||||||
|
|
||||||
|
from fastapi import UploadFile
|
||||||
|
from typing import Dict
|
||||||
|
|
||||||
|
|
||||||
|
class ILevelController(ABC):
    """Abstract contract for the level-assessment HTTP controller."""

    @abstractmethod
    async def generate_exercises(self, dto):
        """Generate level exercises from the request DTO."""
        ...

    @abstractmethod
    async def get_level_exam(self):
        """Return the level exam."""
        ...

    @abstractmethod
    async def get_level_utas(self):
        """Return the level UTAs."""
        ...

    @abstractmethod
    async def upload_level(self, file: UploadFile):
        """Upload a level definition file."""
        ...

    @abstractmethod
    async def get_custom_level(self, data: Dict):
        """Return a custom level built from the given data."""
        ...
|
||||||
21
app/controllers/abc/listening.py
Normal file
21
app/controllers/abc/listening.py
Normal file
@@ -0,0 +1,21 @@
|
|||||||
|
from abc import ABC, abstractmethod
|
||||||
|
from typing import List
|
||||||
|
|
||||||
|
|
||||||
|
class IListeningController(ABC):
    """Abstract contract for the listening-section HTTP controller."""

    @abstractmethod
    async def generate_listening_dialog(self, section_id: int, topic: str, difficulty: str):
        """Generate a dialog for the given section, topic and difficulty."""
        ...

    @abstractmethod
    async def get_listening_question(self, section: int, dto):
        """Generate listening questions for a section from the request DTO."""
        ...

    @abstractmethod
    async def generate_mp3(self, dto):
        """Render a dialog DTO to audio."""
        ...

    @abstractmethod
    async def save_listening(self, data):
        """Persist a listening exercise."""
        ...
|
||||||
20
app/controllers/abc/reading.py
Normal file
20
app/controllers/abc/reading.py
Normal file
@@ -0,0 +1,20 @@
|
|||||||
|
from abc import ABC, abstractmethod
|
||||||
|
from typing import Optional
|
||||||
|
|
||||||
|
from fastapi import UploadFile
|
||||||
|
|
||||||
|
|
||||||
|
class IReadingController(ABC):
    """Abstract contract for the reading-section HTTP controller."""

    @abstractmethod
    async def import_exam(self, exercises: UploadFile, solutions: UploadFile = None):
        """Import a reading exam from uploaded exercise (and optional solution) files."""
        ...

    @abstractmethod
    async def generate_reading_passage(self, passage: int, topic: Optional[str], word_count: Optional[int]):
        """Generate one reading passage, optionally constrained by topic and length."""
        ...

    @abstractmethod
    async def generate_reading_exercises(self, passage: int, dto):
        """Generate exercises for a reading passage from the request DTO."""
        ...
|
||||||
|
|
||||||
25
app/controllers/abc/speaking.py
Normal file
25
app/controllers/abc/speaking.py
Normal file
@@ -0,0 +1,25 @@
|
|||||||
|
from abc import ABC, abstractmethod
|
||||||
|
from typing import Optional
|
||||||
|
|
||||||
|
from fastapi import BackgroundTasks
|
||||||
|
|
||||||
|
|
||||||
|
class ISpeakingController(ABC):
    """Abstract contract for the speaking-section HTTP controller."""

    @abstractmethod
    async def get_speaking_part(self, task: int, topic: str, difficulty: str, second_topic: Optional[str] = None):
        """Return the content for one speaking part."""
        ...

    @abstractmethod
    async def save_speaking(self, data, background_tasks: BackgroundTasks):
        """Persist a speaking exercise; heavy work may be deferred to background tasks."""
        ...

    @abstractmethod
    async def generate_video(
        self, part: int, avatar: str, topic: str, questions: list[str],
        *,
        second_topic: Optional[str] = None,
        prompts: Optional[list[str]] = None,
        suffix: Optional[str] = None,
    ):
        """Generate an avatar video asking the given questions."""
        ...
|
||||||
12
app/controllers/abc/training.py
Normal file
12
app/controllers/abc/training.py
Normal file
@@ -0,0 +1,12 @@
|
|||||||
|
from abc import ABC, abstractmethod
|
||||||
|
|
||||||
|
|
||||||
|
class ITrainingController(ABC):
    """Abstract contract for the training-content HTTP controller."""

    @abstractmethod
    async def fetch_tips(self, data):
        """Return training tips for the request payload."""
        ...

    @abstractmethod
    async def get_training_content(self, data):
        """Return training content for the request payload."""
        ...
|
||||||
10
app/controllers/abc/user.py
Normal file
10
app/controllers/abc/user.py
Normal file
@@ -0,0 +1,10 @@
|
|||||||
|
from abc import ABC, abstractmethod
|
||||||
|
|
||||||
|
from app.dtos.user_batch import BatchUsersDTO
|
||||||
|
|
||||||
|
|
||||||
|
class IUserController(ABC):
    """Abstract contract for the user HTTP controller."""

    @abstractmethod
    async def batch_import(self, batch: BatchUsersDTO):
        """Import a batch of users."""
        ...
|
||||||
8
app/controllers/abc/writing.py
Normal file
8
app/controllers/abc/writing.py
Normal file
@@ -0,0 +1,8 @@
|
|||||||
|
from abc import ABC, abstractmethod
|
||||||
|
|
||||||
|
|
||||||
|
class IWritingController(ABC):
    """Abstract contract for the writing-section HTTP controller."""

    @abstractmethod
    async def get_writing_task_general_question(self, task: int, topic: str, difficulty: str):
        """Return a general question for the given writing task."""
        ...
|
||||||
19
app/controllers/impl/__init__.py
Normal file
19
app/controllers/impl/__init__.py
Normal file
@@ -0,0 +1,19 @@
|
|||||||
|
from .level import LevelController
|
||||||
|
from .listening import ListeningController
|
||||||
|
from .reading import ReadingController
|
||||||
|
from .speaking import SpeakingController
|
||||||
|
from .writing import WritingController
|
||||||
|
from .training import TrainingController
|
||||||
|
from .grade import GradeController
|
||||||
|
from .user import UserController
|
||||||
|
|
||||||
|
__all__ = [
|
||||||
|
"LevelController",
|
||||||
|
"ListeningController",
|
||||||
|
"ReadingController",
|
||||||
|
"SpeakingController",
|
||||||
|
"WritingController",
|
||||||
|
"TrainingController",
|
||||||
|
"GradeController",
|
||||||
|
"UserController"
|
||||||
|
]
|
||||||
54
app/controllers/impl/grade.py
Normal file
54
app/controllers/impl/grade.py
Normal file
@@ -0,0 +1,54 @@
|
|||||||
|
import logging
|
||||||
|
from typing import Dict, List
|
||||||
|
|
||||||
|
from app.configs.constants import FilePaths
|
||||||
|
from app.controllers.abc import IGradeController
|
||||||
|
from app.dtos.writing import WritingGradeTaskDTO
|
||||||
|
from app.helpers import FileHelper
|
||||||
|
from app.services.abc import ISpeakingService, IWritingService, IGradeService
|
||||||
|
from app.utils import handle_exception
|
||||||
|
|
||||||
|
|
||||||
|
class GradeController(IGradeController):
    """Delegates grading endpoints to the grade, speaking and writing services."""

    def __init__(
        self,
        grade_service: IGradeService,
        speaking_service: ISpeakingService,
        writing_service: IWritingService
    ):
        self._service = grade_service
        self._speaking_service = speaking_service
        self._writing_service = writing_service
        self._logger = logging.getLogger(__name__)

    async def grade_writing_task(self, task: int, data: WritingGradeTaskDTO):
        """Grade one writing task from its question/answer pair."""
        return await self._writing_service.grade_writing_task(task, data.question, data.answer)

    @handle_exception(400)
    async def grade_speaking_task(self, task: int, answers: List[Dict]) -> Dict:
        """Grade a speaking task; stale audio recordings are purged first."""
        # Housekeeping: drop audio files older than one day before grading.
        FileHelper.delete_files_older_than_one_day(FilePaths.AUDIO_FILES_PATH)
        return await self._speaking_service.grade_speaking_task(task, answers)

    async def grade_short_answers(self, data: Dict):
        return await self._service.grade_short_answers(data)

    async def grading_summary(self, data: Dict):
        """Compute an overall summary from whichever graded sections are present."""
        wanted_codes = ['reading', 'listening', 'writing', 'speaking', 'level']
        present = self._extract_existing_sections_from_body(data, wanted_codes)
        return await self._service.calculate_grading_summary(present)

    @staticmethod
    def _extract_existing_sections_from_body(my_dict, keys_to_extract):
        """Return the well-formed section dicts (code/grade/name) whose code is wanted.

        NOTE(review): implicitly returns None when 'sections' is missing or
        empty — confirm the summary service tolerates that.
        """
        if 'sections' in my_dict and isinstance(my_dict['sections'], list) and len(my_dict['sections']) > 0:
            return [
                item for item in my_dict['sections']
                if 'code' in item
                and item['code'] in keys_to_extract
                and 'grade' in item
                and 'name' in item
            ]
|
||||||
|
|
||||||
28
app/controllers/impl/level.py
Normal file
28
app/controllers/impl/level.py
Normal file
@@ -0,0 +1,28 @@
|
|||||||
|
from fastapi import UploadFile
|
||||||
|
from typing import Dict
|
||||||
|
|
||||||
|
from watchfiles import awatch
|
||||||
|
|
||||||
|
from app.controllers.abc import ILevelController
|
||||||
|
from app.services.abc import ILevelService
|
||||||
|
|
||||||
|
|
||||||
|
class LevelController(ILevelController):
    """Thin HTTP-layer wrapper that forwards every call to the level service."""

    def __init__(self, level_service: ILevelService):
        self._service = level_service

    async def generate_exercises(self, dto):
        return await self._service.generate_exercises(dto)

    async def get_level_exam(self):
        return await self._service.get_level_exam()

    async def get_level_utas(self):
        return await self._service.get_level_utas()

    async def upload_level(self, file: UploadFile):
        return await self._service.upload_level(file)

    async def get_custom_level(self, data: Dict):
        return await self._service.get_custom_level(data)
|
||||||
29
app/controllers/impl/listening.py
Normal file
29
app/controllers/impl/listening.py
Normal file
@@ -0,0 +1,29 @@
|
|||||||
|
from app.controllers.abc import IListeningController
|
||||||
|
from app.dtos.listening import SaveListeningDTO, GenerateListeningExercises, Dialog
|
||||||
|
from app.services.abc import IListeningService
|
||||||
|
from fastapi import Response
|
||||||
|
|
||||||
|
|
||||||
|
class ListeningController(IListeningController):
    """Thin HTTP-layer wrapper around the listening service."""

    def __init__(self, listening_service: IListeningService):
        self._service = listening_service

    async def generate_listening_dialog(self, section_id: int, topic: str, difficulty: str):
        return await self._service.generate_listening_dialog(section_id, topic, difficulty)

    async def get_listening_question(self, section: int, dto: GenerateListeningExercises):
        return await self._service.get_listening_question(section, dto)

    async def generate_mp3(self, dto: Dialog):
        """Render the dialog to MP3 and return it as a downloadable response."""
        audio = await self._service.generate_mp3(dto)
        download_headers = {"Content-Disposition": "attachment;filename=speech.mp3"}
        return Response(content=audio, media_type="audio/mpeg", headers=download_headers)

    async def save_listening(self, data: SaveListeningDTO):
        return await self._service.save_listening(data.parts, data.minTimer, data.difficulty, data.id)
|
||||||
25
app/controllers/impl/reading.py
Normal file
25
app/controllers/impl/reading.py
Normal file
@@ -0,0 +1,25 @@
|
|||||||
|
import logging
|
||||||
|
from typing import Optional
|
||||||
|
|
||||||
|
from fastapi import UploadFile
|
||||||
|
from grpc import services
|
||||||
|
|
||||||
|
from app.controllers.abc import IReadingController
|
||||||
|
from app.dtos.reading import ReadingDTO
|
||||||
|
from app.services.abc import IReadingService
|
||||||
|
|
||||||
|
|
||||||
|
class ReadingController(IReadingController):
|
||||||
|
|
||||||
|
def __init__(self, reading_service: IReadingService):
|
||||||
|
self._service = reading_service
|
||||||
|
self._logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
async def import_exam(self, exercises: UploadFile, solutions: UploadFile = None):
|
||||||
|
return await self._service.import_exam(exercises, solutions)
|
||||||
|
|
||||||
|
async def generate_reading_passage(self, passage: int, topic: Optional[str], word_count: Optional[int]):
|
||||||
|
return await self._service.generate_reading_passage(passage, topic, word_count)
|
||||||
|
|
||||||
|
async def generate_reading_exercises(self, passage: int, dto: ReadingDTO):
|
||||||
|
return await self._service.generate_reading_exercises(dto)
|
||||||
47
app/controllers/impl/speaking.py
Normal file
47
app/controllers/impl/speaking.py
Normal file
@@ -0,0 +1,47 @@
|
|||||||
|
import logging
|
||||||
|
import uuid
|
||||||
|
from typing import Optional
|
||||||
|
|
||||||
|
from fastapi import BackgroundTasks
|
||||||
|
|
||||||
|
from app.controllers.abc import ISpeakingController
|
||||||
|
from app.dtos.speaking import SaveSpeakingDTO
|
||||||
|
|
||||||
|
from app.services.abc import ISpeakingService
|
||||||
|
from app.configs.constants import ExamVariant, MinTimers
|
||||||
|
from app.configs.question_templates import getSpeakingTemplate
|
||||||
|
|
||||||
|
|
||||||
|
class SpeakingController(ISpeakingController):
|
||||||
|
|
||||||
|
def __init__(self, speaking_service: ISpeakingService):
|
||||||
|
self._service = speaking_service
|
||||||
|
self._logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
async def get_speaking_part(self, task: int, topic: str, difficulty: str, second_topic: Optional[str] = None):
|
||||||
|
return await self._service.get_speaking_part(task, topic, difficulty, second_topic)
|
||||||
|
|
||||||
|
async def save_speaking(self, data: SaveSpeakingDTO, background_tasks: BackgroundTasks):
|
||||||
|
exercises = data.exercises
|
||||||
|
min_timer = data.minTimer
|
||||||
|
|
||||||
|
template = getSpeakingTemplate()
|
||||||
|
template["minTimer"] = min_timer
|
||||||
|
|
||||||
|
if min_timer < MinTimers.SPEAKING_MIN_TIMER_DEFAULT:
|
||||||
|
template["variant"] = ExamVariant.PARTIAL.value
|
||||||
|
else:
|
||||||
|
template["variant"] = ExamVariant.FULL.value
|
||||||
|
|
||||||
|
req_id = str(uuid.uuid4())
|
||||||
|
self._logger.info(f'Received request to save speaking with id: {req_id}')
|
||||||
|
|
||||||
|
background_tasks.add_task(self._service.create_videos_and_save_to_db, exercises, template, req_id)
|
||||||
|
|
||||||
|
self._logger.info('Started background task to save speaking.')
|
||||||
|
|
||||||
|
# Return response without waiting for create_videos_and_save_to_db to finish
|
||||||
|
return {**template, "id": req_id}
|
||||||
|
|
||||||
|
async def generate_video(self, *args, **kwargs):
|
||||||
|
return await self._service.generate_video(*args, **kwargs)
|
||||||
17
app/controllers/impl/training.py
Normal file
17
app/controllers/impl/training.py
Normal file
@@ -0,0 +1,17 @@
|
|||||||
|
from typing import Dict
|
||||||
|
|
||||||
|
from app.controllers.abc import ITrainingController
|
||||||
|
from app.dtos.training import FetchTipsDTO
|
||||||
|
from app.services.abc import ITrainingService
|
||||||
|
|
||||||
|
|
||||||
|
class TrainingController(ITrainingController):
|
||||||
|
|
||||||
|
def __init__(self, training_service: ITrainingService):
|
||||||
|
self._service = training_service
|
||||||
|
|
||||||
|
async def fetch_tips(self, data: FetchTipsDTO):
|
||||||
|
return await self._service.fetch_tips(data.context, data.question, data.answer, data.correct_answer)
|
||||||
|
|
||||||
|
async def get_training_content(self, data: Dict):
|
||||||
|
return await self._service.get_training_content(data)
|
||||||
12
app/controllers/impl/user.py
Normal file
12
app/controllers/impl/user.py
Normal file
@@ -0,0 +1,12 @@
|
|||||||
|
from app.controllers.abc import IUserController
|
||||||
|
from app.dtos.user_batch import BatchUsersDTO
|
||||||
|
from app.services.abc import IUserService
|
||||||
|
|
||||||
|
|
||||||
|
class UserController(IUserController):
|
||||||
|
|
||||||
|
def __init__(self, user_service: IUserService):
|
||||||
|
self._service = user_service
|
||||||
|
|
||||||
|
async def batch_import(self, batch: BatchUsersDTO):
|
||||||
|
return await self._service.batch_users(batch)
|
||||||
11
app/controllers/impl/writing.py
Normal file
11
app/controllers/impl/writing.py
Normal file
@@ -0,0 +1,11 @@
|
|||||||
|
from app.controllers.abc import IWritingController
|
||||||
|
from app.services.abc import IWritingService
|
||||||
|
|
||||||
|
|
||||||
|
class WritingController(IWritingController):
|
||||||
|
|
||||||
|
def __init__(self, writing_service: IWritingService):
|
||||||
|
self._service = writing_service
|
||||||
|
|
||||||
|
async def get_writing_task_general_question(self, task: int, topic: str, difficulty: str):
|
||||||
|
return await self._service.get_writing_task_general_question(task, topic, difficulty)
|
||||||
0
app/dtos/__init__.py
Normal file
0
app/dtos/__init__.py
Normal file
0
app/dtos/exams/__init__.py
Normal file
0
app/dtos/exams/__init__.py
Normal file
@@ -1,57 +1,57 @@
|
|||||||
from pydantic import BaseModel, Field
|
from pydantic import BaseModel, Field
|
||||||
from typing import List, Dict, Union, Optional, Any
|
from typing import List, Dict, Union, Optional
|
||||||
from uuid import uuid4, UUID
|
from uuid import uuid4, UUID
|
||||||
|
|
||||||
|
|
||||||
class Option(BaseModel):
|
class Option(BaseModel):
|
||||||
id: str
|
id: str
|
||||||
text: str
|
text: str
|
||||||
|
|
||||||
|
|
||||||
class MultipleChoiceQuestion(BaseModel):
|
class MultipleChoiceQuestion(BaseModel):
|
||||||
id: str
|
id: str
|
||||||
prompt: str
|
prompt: str
|
||||||
variant: str = "text"
|
variant: str = "text"
|
||||||
solution: str
|
solution: str
|
||||||
options: List[Option]
|
options: List[Option]
|
||||||
|
|
||||||
|
|
||||||
class MultipleChoiceExercise(BaseModel):
|
class MultipleChoiceExercise(BaseModel):
|
||||||
id: UUID = Field(default_factory=uuid4)
|
id: UUID = Field(default_factory=uuid4)
|
||||||
type: str = "multipleChoice"
|
type: str = "multipleChoice"
|
||||||
prompt: str = "Select the appropriate option."
|
prompt: str = "Select the appropriate option."
|
||||||
questions: List[MultipleChoiceQuestion]
|
questions: List[MultipleChoiceQuestion]
|
||||||
userSolutions: List = Field(default_factory=list)
|
userSolutions: List = Field(default_factory=list)
|
||||||
|
|
||||||
|
|
||||||
class FillBlanksWord(BaseModel):
|
class FillBlanksWord(BaseModel):
|
||||||
id: str
|
id: str
|
||||||
options: Dict[str, str]
|
options: Dict[str, str]
|
||||||
|
|
||||||
|
|
||||||
class FillBlanksSolution(BaseModel):
|
class FillBlanksSolution(BaseModel):
|
||||||
id: str
|
id: str
|
||||||
solution: str
|
solution: str
|
||||||
|
|
||||||
|
|
||||||
class FillBlanksExercise(BaseModel):
|
class FillBlanksExercise(BaseModel):
|
||||||
id: UUID = Field(default_factory=uuid4)
|
id: UUID = Field(default_factory=uuid4)
|
||||||
type: str = "fillBlanks"
|
type: str = "fillBlanks"
|
||||||
variant: str = "mc"
|
variant: str = "mc"
|
||||||
prompt: str = "Click a blank to select the appropriate word for it."
|
prompt: str = "Click a blank to select the appropriate word for it."
|
||||||
text: str
|
text: str
|
||||||
solutions: List[FillBlanksSolution]
|
solutions: List[FillBlanksSolution]
|
||||||
words: List[FillBlanksWord]
|
words: List[FillBlanksWord]
|
||||||
userSolutions: List = Field(default_factory=list)
|
userSolutions: List = Field(default_factory=list)
|
||||||
|
|
||||||
|
|
||||||
Exercise = Union[MultipleChoiceExercise, FillBlanksExercise]
|
Exercise = Union[MultipleChoiceExercise, FillBlanksExercise]
|
||||||
|
|
||||||
|
|
||||||
class Part(BaseModel):
|
class Part(BaseModel):
|
||||||
exercises: List[Exercise]
|
exercises: List[Exercise]
|
||||||
context: Optional[str] = Field(default=None)
|
context: Optional[str] = Field(default=None)
|
||||||
|
|
||||||
|
|
||||||
class Exam(BaseModel):
|
class Exam(BaseModel):
|
||||||
parts: List[Part]
|
parts: List[Part]
|
||||||
110
app/dtos/exams/reading.py
Normal file
110
app/dtos/exams/reading.py
Normal file
@@ -0,0 +1,110 @@
|
|||||||
|
from enum import Enum
|
||||||
|
|
||||||
|
from pydantic import BaseModel, Field
|
||||||
|
from typing import List, Union
|
||||||
|
from uuid import uuid4, UUID
|
||||||
|
|
||||||
|
|
||||||
|
class WriteBlanksSolution(BaseModel):
|
||||||
|
id: str
|
||||||
|
solution: List[str]
|
||||||
|
|
||||||
|
class WriteBlanksExercise(BaseModel):
|
||||||
|
id: UUID = Field(default_factory=uuid4)
|
||||||
|
type: str = "writeBlanks"
|
||||||
|
maxWords: int
|
||||||
|
solutions: List[WriteBlanksSolution]
|
||||||
|
text: str
|
||||||
|
|
||||||
|
@property
|
||||||
|
def prompt(self) -> str:
|
||||||
|
return f"Choose no more than {self.maxWords} words and/or a number from the passage for each answer."
|
||||||
|
|
||||||
|
|
||||||
|
class MatchSentencesOption(BaseModel):
|
||||||
|
id: str
|
||||||
|
sentence: str
|
||||||
|
|
||||||
|
class MatchSentencesSentence(MatchSentencesOption):
|
||||||
|
solution: str
|
||||||
|
|
||||||
|
class MatchSentencesVariant(str, Enum):
|
||||||
|
HEADING = "heading"
|
||||||
|
IDEAMATCH = "ideaMatch"
|
||||||
|
|
||||||
|
|
||||||
|
class MatchSentencesExercise(BaseModel):
|
||||||
|
options: List[MatchSentencesOption]
|
||||||
|
sentences: List[MatchSentencesSentence]
|
||||||
|
type: str = "matchSentences"
|
||||||
|
variant: MatchSentencesVariant
|
||||||
|
|
||||||
|
@property
|
||||||
|
def prompt(self) -> str:
|
||||||
|
return (
|
||||||
|
"Choose the correct heading for paragraphs from the list of headings below."
|
||||||
|
if self.variant == MatchSentencesVariant.HEADING else
|
||||||
|
"Choose the correct author for the ideas/opinions from the list of authors below."
|
||||||
|
)
|
||||||
|
|
||||||
|
class TrueFalseSolution(str, Enum):
|
||||||
|
TRUE = "true"
|
||||||
|
FALSE = "false"
|
||||||
|
NOT_GIVEN = "not_given"
|
||||||
|
|
||||||
|
class TrueFalseQuestions(BaseModel):
|
||||||
|
prompt: str
|
||||||
|
solution: TrueFalseSolution
|
||||||
|
id: str
|
||||||
|
|
||||||
|
class TrueFalseExercise(BaseModel):
|
||||||
|
id: UUID = Field(default_factory=uuid4)
|
||||||
|
questions: List[TrueFalseQuestions]
|
||||||
|
type: str = "trueFalse"
|
||||||
|
prompt: str = "Do the following statements agree with the information given in the Reading Passage?"
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
class FillBlanksSolution(BaseModel):
|
||||||
|
id: str
|
||||||
|
solution: str
|
||||||
|
|
||||||
|
class FillBlanksWord(BaseModel):
|
||||||
|
letter: str
|
||||||
|
word: str
|
||||||
|
|
||||||
|
class FillBlanksExercise(BaseModel):
|
||||||
|
id: UUID = Field(default_factory=uuid4)
|
||||||
|
solutions: List[FillBlanksSolution]
|
||||||
|
text: str
|
||||||
|
type: str = "fillBlanks"
|
||||||
|
words: List[FillBlanksWord]
|
||||||
|
allowRepetition: bool = False
|
||||||
|
|
||||||
|
@property
|
||||||
|
def prompt(self) -> str:
|
||||||
|
prompt = "Complete the summary below. Write the letter of the corresponding word(s) for it."
|
||||||
|
|
||||||
|
return (
|
||||||
|
f"{prompt}"
|
||||||
|
if len(self.solutions) == len(self.words) else
|
||||||
|
f"{prompt}\\nThere are more words than spaces so you will not use them all."
|
||||||
|
)
|
||||||
|
|
||||||
|
Exercise = Union[FillBlanksExercise, TrueFalseExercise, MatchSentencesExercise, WriteBlanksExercise]
|
||||||
|
|
||||||
|
|
||||||
|
class Context(BaseModel):
|
||||||
|
title: str
|
||||||
|
content: str
|
||||||
|
|
||||||
|
class Part(BaseModel):
|
||||||
|
exercises: List[Exercise]
|
||||||
|
text: Context
|
||||||
|
|
||||||
|
class Exam(BaseModel):
|
||||||
|
id: UUID = Field(default_factory=uuid4)
|
||||||
|
module: str = "reading"
|
||||||
|
minTimer: int
|
||||||
|
isDiagnostic: bool = False
|
||||||
|
parts: List[Part]
|
||||||
19
app/dtos/level.py
Normal file
19
app/dtos/level.py
Normal file
@@ -0,0 +1,19 @@
|
|||||||
|
from typing import List, Optional
|
||||||
|
|
||||||
|
from pydantic import BaseModel
|
||||||
|
|
||||||
|
from app.configs.constants import LevelExerciseType
|
||||||
|
|
||||||
|
|
||||||
|
class LevelExercises(BaseModel):
|
||||||
|
type: LevelExerciseType
|
||||||
|
quantity: int
|
||||||
|
text_size: Optional[int]
|
||||||
|
sa_qty: Optional[int]
|
||||||
|
mc_qty: Optional[int]
|
||||||
|
topic: Optional[str]
|
||||||
|
|
||||||
|
class LevelExercisesDTO(BaseModel):
|
||||||
|
text: str
|
||||||
|
exercises: List[LevelExercises]
|
||||||
|
difficulty: Optional[str]
|
||||||
34
app/dtos/listening.py
Normal file
34
app/dtos/listening.py
Normal file
@@ -0,0 +1,34 @@
|
|||||||
|
import random
|
||||||
|
import uuid
|
||||||
|
from typing import List, Dict, Optional
|
||||||
|
|
||||||
|
from pydantic import BaseModel, Field
|
||||||
|
|
||||||
|
from app.configs.constants import MinTimers, EducationalContent, ListeningExerciseType
|
||||||
|
|
||||||
|
|
||||||
|
class SaveListeningDTO(BaseModel):
|
||||||
|
parts: List[Dict]
|
||||||
|
minTimer: int = MinTimers.LISTENING_MIN_TIMER_DEFAULT
|
||||||
|
difficulty: str = random.choice(EducationalContent.DIFFICULTIES)
|
||||||
|
id: str = str(uuid.uuid4())
|
||||||
|
|
||||||
|
|
||||||
|
class ListeningExercises(BaseModel):
|
||||||
|
type: ListeningExerciseType
|
||||||
|
quantity: int
|
||||||
|
|
||||||
|
class GenerateListeningExercises(BaseModel):
|
||||||
|
text: str
|
||||||
|
exercises: List[ListeningExercises]
|
||||||
|
difficulty: Optional[str]
|
||||||
|
|
||||||
|
class ConversationPayload(BaseModel):
|
||||||
|
name: str
|
||||||
|
gender: str
|
||||||
|
text: str
|
||||||
|
voice: str
|
||||||
|
|
||||||
|
class Dialog(BaseModel):
|
||||||
|
conversation: Optional[List[ConversationPayload]] = Field(default_factory=list)
|
||||||
|
monologue: Optional[str] = None
|
||||||
17
app/dtos/reading.py
Normal file
17
app/dtos/reading.py
Normal file
@@ -0,0 +1,17 @@
|
|||||||
|
import random
|
||||||
|
from typing import List, Optional
|
||||||
|
|
||||||
|
from pydantic import BaseModel, Field
|
||||||
|
|
||||||
|
from app.configs.constants import ReadingExerciseType, EducationalContent
|
||||||
|
|
||||||
|
class ReadingExercise(BaseModel):
|
||||||
|
type: ReadingExerciseType
|
||||||
|
quantity: int
|
||||||
|
num_random_words: Optional[int] = Field(1)
|
||||||
|
max_words: Optional[int] = Field(3)
|
||||||
|
|
||||||
|
class ReadingDTO(BaseModel):
|
||||||
|
text: str = Field(...)
|
||||||
|
exercises: List[ReadingExercise] = Field(...)
|
||||||
|
difficulty: str = Field(random.choice(EducationalContent.DIFFICULTIES))
|
||||||
@@ -1,29 +1,29 @@
|
|||||||
from pydantic import BaseModel
|
from pydantic import BaseModel
|
||||||
from typing import List, Dict, Union, Any, Optional
|
from typing import List, Dict, Union, Any, Optional
|
||||||
|
|
||||||
|
|
||||||
class Option(BaseModel):
|
class Option(BaseModel):
|
||||||
id: str
|
id: str
|
||||||
text: str
|
text: str
|
||||||
|
|
||||||
|
|
||||||
class MultipleChoiceQuestion(BaseModel):
|
class MultipleChoiceQuestion(BaseModel):
|
||||||
type: str = "multipleChoice"
|
type: str = "multipleChoice"
|
||||||
id: str
|
id: str
|
||||||
prompt: str
|
prompt: str
|
||||||
variant: str = "text"
|
variant: str = "text"
|
||||||
options: List[Option]
|
options: List[Option]
|
||||||
|
|
||||||
|
|
||||||
class FillBlanksWord(BaseModel):
|
class FillBlanksWord(BaseModel):
|
||||||
type: str = "fillBlanks"
|
type: str = "fillBlanks"
|
||||||
id: str
|
id: str
|
||||||
options: Dict[str, str]
|
options: Dict[str, str]
|
||||||
|
|
||||||
|
|
||||||
Component = Union[MultipleChoiceQuestion, FillBlanksWord, Dict[str, Any]]
|
Component = Union[MultipleChoiceQuestion, FillBlanksWord, Dict[str, Any]]
|
||||||
|
|
||||||
|
|
||||||
class Sheet(BaseModel):
|
class Sheet(BaseModel):
|
||||||
batch: Optional[int] = None
|
batch: Optional[int] = None
|
||||||
components: List[Component]
|
components: List[Component]
|
||||||
42
app/dtos/speaking.py
Normal file
42
app/dtos/speaking.py
Normal file
@@ -0,0 +1,42 @@
|
|||||||
|
import random
|
||||||
|
from typing import List, Dict
|
||||||
|
|
||||||
|
from pydantic import BaseModel
|
||||||
|
|
||||||
|
from app.configs.constants import MinTimers, ELAIAvatars
|
||||||
|
|
||||||
|
|
||||||
|
class SaveSpeakingDTO(BaseModel):
|
||||||
|
exercises: List[Dict]
|
||||||
|
minTimer: int = MinTimers.SPEAKING_MIN_TIMER_DEFAULT
|
||||||
|
|
||||||
|
|
||||||
|
class GradeSpeakingDTO(BaseModel):
|
||||||
|
question: str
|
||||||
|
answer: str
|
||||||
|
|
||||||
|
|
||||||
|
class GradeSpeakingAnswersDTO(BaseModel):
|
||||||
|
answers: List[Dict]
|
||||||
|
|
||||||
|
|
||||||
|
class GenerateVideo1DTO(BaseModel):
|
||||||
|
avatar: str = (random.choice(list(ELAIAvatars))).name
|
||||||
|
questions: List[str]
|
||||||
|
first_topic: str
|
||||||
|
second_topic: str
|
||||||
|
|
||||||
|
|
||||||
|
class GenerateVideo2DTO(BaseModel):
|
||||||
|
avatar: str = (random.choice(list(ELAIAvatars))).name
|
||||||
|
prompts: List[str] = []
|
||||||
|
suffix: str = ""
|
||||||
|
question: str
|
||||||
|
topic: str
|
||||||
|
|
||||||
|
|
||||||
|
class GenerateVideo3DTO(BaseModel):
|
||||||
|
avatar: str = (random.choice(list(ELAIAvatars))).name
|
||||||
|
questions: List[str]
|
||||||
|
topic: str
|
||||||
|
|
||||||
@@ -1,29 +1,37 @@
|
|||||||
from pydantic import BaseModel
|
from pydantic import BaseModel
|
||||||
from typing import List
|
from typing import List
|
||||||
|
|
||||||
|
|
||||||
class QueryDTO(BaseModel):
|
class FetchTipsDTO(BaseModel):
|
||||||
category: str
|
context: str
|
||||||
text: str
|
question: str
|
||||||
|
answer: str
|
||||||
|
correct_answer: str
|
||||||
class DetailsDTO(BaseModel):
|
|
||||||
exam_id: str
|
|
||||||
date: int
|
class QueryDTO(BaseModel):
|
||||||
performance_comment: str
|
category: str
|
||||||
detailed_summary: str
|
text: str
|
||||||
|
|
||||||
|
|
||||||
class WeakAreaDTO(BaseModel):
|
class DetailsDTO(BaseModel):
|
||||||
area: str
|
exam_id: str
|
||||||
comment: str
|
date: int
|
||||||
|
performance_comment: str
|
||||||
|
detailed_summary: str
|
||||||
class TrainingContentDTO(BaseModel):
|
|
||||||
details: List[DetailsDTO]
|
|
||||||
weak_areas: List[WeakAreaDTO]
|
class WeakAreaDTO(BaseModel):
|
||||||
queries: List[QueryDTO]
|
area: str
|
||||||
|
comment: str
|
||||||
|
|
||||||
class TipsDTO(BaseModel):
|
|
||||||
tip_ids: List[str]
|
class TrainingContentDTO(BaseModel):
|
||||||
|
details: List[DetailsDTO]
|
||||||
|
weak_areas: List[WeakAreaDTO]
|
||||||
|
queries: List[QueryDTO]
|
||||||
|
|
||||||
|
|
||||||
|
class TipsDTO(BaseModel):
|
||||||
|
tip_ids: List[str]
|
||||||
|
|
||||||
@@ -2,7 +2,6 @@ import uuid
|
|||||||
from typing import Optional
|
from typing import Optional
|
||||||
|
|
||||||
from pydantic import BaseModel, Field
|
from pydantic import BaseModel, Field
|
||||||
from datetime import datetime
|
|
||||||
|
|
||||||
|
|
||||||
class DemographicInfo(BaseModel):
|
class DemographicInfo(BaseModel):
|
||||||
@@ -11,8 +10,8 @@ class DemographicInfo(BaseModel):
|
|||||||
country: Optional[str] = None
|
country: Optional[str] = None
|
||||||
|
|
||||||
class Entity(BaseModel):
|
class Entity(BaseModel):
|
||||||
id: str
|
id: str
|
||||||
role: str
|
role: str
|
||||||
|
|
||||||
|
|
||||||
class UserDTO(BaseModel):
|
class UserDTO(BaseModel):
|
||||||
6
app/dtos/writing.py
Normal file
6
app/dtos/writing.py
Normal file
@@ -0,0 +1,6 @@
|
|||||||
|
from pydantic import BaseModel
|
||||||
|
|
||||||
|
|
||||||
|
class WritingGradeTaskDTO(BaseModel):
|
||||||
|
question: str
|
||||||
|
answer: str
|
||||||
6
app/exceptions/__init__.py
Normal file
6
app/exceptions/__init__.py
Normal file
@@ -0,0 +1,6 @@
|
|||||||
|
from .exceptions import CustomException, UnauthorizedException
|
||||||
|
|
||||||
|
__all__ = [
|
||||||
|
"CustomException",
|
||||||
|
"UnauthorizedException"
|
||||||
|
]
|
||||||
17
app/exceptions/exceptions.py
Normal file
17
app/exceptions/exceptions.py
Normal file
@@ -0,0 +1,17 @@
|
|||||||
|
from http import HTTPStatus
|
||||||
|
|
||||||
|
|
||||||
|
class CustomException(Exception):
|
||||||
|
code = HTTPStatus.INTERNAL_SERVER_ERROR
|
||||||
|
error_code = HTTPStatus.INTERNAL_SERVER_ERROR
|
||||||
|
message = HTTPStatus.INTERNAL_SERVER_ERROR.description
|
||||||
|
|
||||||
|
def __init__(self, message=None):
|
||||||
|
if message:
|
||||||
|
self.message = message
|
||||||
|
|
||||||
|
|
||||||
|
class UnauthorizedException(CustomException):
|
||||||
|
code = HTTPStatus.UNAUTHORIZED
|
||||||
|
error_code = HTTPStatus.UNAUTHORIZED
|
||||||
|
message = HTTPStatus.UNAUTHORIZED.description
|
||||||
11
app/helpers/__init__.py
Normal file
11
app/helpers/__init__.py
Normal file
@@ -0,0 +1,11 @@
|
|||||||
|
from .file import FileHelper
|
||||||
|
from .text import TextHelper
|
||||||
|
from .token_counter import count_tokens
|
||||||
|
from .exercises import ExercisesHelper
|
||||||
|
|
||||||
|
__all__ = [
|
||||||
|
"FileHelper",
|
||||||
|
"TextHelper",
|
||||||
|
"count_tokens",
|
||||||
|
"ExercisesHelper",
|
||||||
|
]
|
||||||
249
app/helpers/exercises.py
Normal file
249
app/helpers/exercises.py
Normal file
@@ -0,0 +1,249 @@
|
|||||||
|
import queue
|
||||||
|
import random
|
||||||
|
import re
|
||||||
|
import string
|
||||||
|
from wonderwords import RandomWord
|
||||||
|
|
||||||
|
from .text import TextHelper
|
||||||
|
|
||||||
|
|
||||||
|
class ExercisesHelper:
|
||||||
|
|
||||||
|
@staticmethod
|
||||||
|
def divide_number_into_parts(number, parts):
|
||||||
|
if number < parts:
|
||||||
|
return None
|
||||||
|
|
||||||
|
part_size = number // parts
|
||||||
|
remaining = number % parts
|
||||||
|
|
||||||
|
q = queue.Queue()
|
||||||
|
|
||||||
|
for i in range(parts):
|
||||||
|
if i < remaining:
|
||||||
|
q.put(part_size + 1)
|
||||||
|
else:
|
||||||
|
q.put(part_size)
|
||||||
|
|
||||||
|
return q
|
||||||
|
|
||||||
|
@staticmethod
|
||||||
|
def fix_exercise_ids(exercise, start_id):
|
||||||
|
# Initialize the starting ID for the first exercise
|
||||||
|
current_id = start_id
|
||||||
|
|
||||||
|
questions = exercise["questions"]
|
||||||
|
|
||||||
|
# Iterate through questions and update the "id" value
|
||||||
|
for question in questions:
|
||||||
|
question["id"] = str(current_id)
|
||||||
|
current_id += 1
|
||||||
|
|
||||||
|
return exercise
|
||||||
|
|
||||||
|
@staticmethod
|
||||||
|
def replace_first_occurrences_with_placeholders(text: str, words_to_replace: list, start_id):
|
||||||
|
for i, word in enumerate(words_to_replace, start=start_id):
|
||||||
|
# Create a case-insensitive regular expression pattern
|
||||||
|
pattern = re.compile(r'\b' + re.escape(word) + r'\b', re.IGNORECASE)
|
||||||
|
placeholder = '{{' + str(i) + '}}'
|
||||||
|
text = pattern.sub(placeholder, text, 1)
|
||||||
|
return text
|
||||||
|
|
||||||
|
@staticmethod
|
||||||
|
def replace_first_occurrences_with_placeholders_notes(notes: list, words_to_replace: list, start_id):
|
||||||
|
replaced_notes = []
|
||||||
|
for i, note in enumerate(notes, start=0):
|
||||||
|
word = words_to_replace[i]
|
||||||
|
pattern = re.compile(r'\b' + re.escape(word) + r'\b', re.IGNORECASE)
|
||||||
|
placeholder = '{{' + str(start_id + i) + '}}'
|
||||||
|
note = pattern.sub(placeholder, note, 1)
|
||||||
|
replaced_notes.append(note)
|
||||||
|
return replaced_notes
|
||||||
|
|
||||||
|
@staticmethod
|
||||||
|
def add_random_words_and_shuffle(word_array, num_random_words):
|
||||||
|
r = RandomWord()
|
||||||
|
random_words_selected = r.random_words(num_random_words)
|
||||||
|
|
||||||
|
combined_array = word_array + random_words_selected
|
||||||
|
|
||||||
|
random.shuffle(combined_array)
|
||||||
|
|
||||||
|
result = []
|
||||||
|
for i, word in enumerate(combined_array):
|
||||||
|
letter = chr(65 + i) # chr(65) is 'A'
|
||||||
|
result.append({"letter": letter, "word": word})
|
||||||
|
|
||||||
|
return result
|
||||||
|
|
||||||
|
@staticmethod
|
||||||
|
def fillblanks_build_solutions_array(words, start_id):
|
||||||
|
solutions = []
|
||||||
|
for i, word in enumerate(words, start=start_id):
|
||||||
|
solutions.append(
|
||||||
|
{
|
||||||
|
"id": str(i),
|
||||||
|
"solution": word
|
||||||
|
}
|
||||||
|
)
|
||||||
|
return solutions
|
||||||
|
|
||||||
|
@staticmethod
|
||||||
|
def remove_excess_questions(questions: [], quantity):
|
||||||
|
count_true = 0
|
||||||
|
result = []
|
||||||
|
|
||||||
|
for item in reversed(questions):
|
||||||
|
if item.get('solution') == 'true' and count_true < quantity:
|
||||||
|
count_true += 1
|
||||||
|
else:
|
||||||
|
result.append(item)
|
||||||
|
|
||||||
|
result.reverse()
|
||||||
|
return result
|
||||||
|
|
||||||
|
@staticmethod
|
||||||
|
def build_write_blanks_text(questions: [], start_id):
|
||||||
|
result = ""
|
||||||
|
for i, q in enumerate(questions, start=start_id):
|
||||||
|
placeholder = '{{' + str(i) + '}}'
|
||||||
|
result = result + q["question"] + placeholder + "\\n"
|
||||||
|
return result
|
||||||
|
|
||||||
|
@staticmethod
|
||||||
|
def build_write_blanks_text_form(form: [], start_id):
|
||||||
|
result = ""
|
||||||
|
replaced_words = []
|
||||||
|
for i, entry in enumerate(form, start=start_id):
|
||||||
|
placeholder = '{{' + str(i) + '}}'
|
||||||
|
# Use regular expression to find the string after ':'
|
||||||
|
match = re.search(r'(?<=:)\s*(.*)', entry)
|
||||||
|
# Extract the matched string
|
||||||
|
original_string = match.group(1)
|
||||||
|
# Split the string into words
|
||||||
|
words = re.findall(r'\b\w+\b', original_string)
|
||||||
|
# Remove words with only one letter
|
||||||
|
filtered_words = [word for word in words if len(word) > 1]
|
||||||
|
# Choose a random word from the list of words
|
||||||
|
selected_word = random.choice(filtered_words)
|
||||||
|
pattern = re.compile(r'\b' + re.escape(selected_word) + r'\b', re.IGNORECASE)
|
||||||
|
|
||||||
|
# Replace the chosen word with the placeholder
|
||||||
|
replaced_string = pattern.sub(placeholder, original_string, 1)
|
||||||
|
# Construct the final replaced string
|
||||||
|
replaced_string = entry.replace(original_string, replaced_string)
|
||||||
|
|
||||||
|
result = result + replaced_string + "\\n"
|
||||||
|
# Save the replaced word or use it as needed
|
||||||
|
# For example, you can save it to a file or a list
|
||||||
|
replaced_words.append(selected_word)
|
||||||
|
return result, replaced_words
|
||||||
|
|
||||||
|
@staticmethod
|
||||||
|
def build_write_blanks_solutions(questions: [], start_id):
|
||||||
|
solutions = []
|
||||||
|
for i, q in enumerate(questions, start=start_id):
|
||||||
|
solution = [q["possible_answers"]] if isinstance(q["possible_answers"], str) else q["possible_answers"]
|
||||||
|
|
||||||
|
solutions.append(
|
||||||
|
{
|
||||||
|
"id": str(i),
|
||||||
|
"solution": solution
|
||||||
|
}
|
||||||
|
)
|
||||||
|
return solutions
|
||||||
|
|
||||||
|
@staticmethod
|
||||||
|
def build_write_blanks_solutions_listening(words: [], start_id):
|
||||||
|
solutions = []
|
||||||
|
for i, word in enumerate(words, start=start_id):
|
||||||
|
solution = [word] if isinstance(word, str) else word
|
||||||
|
|
||||||
|
solutions.append(
|
||||||
|
{
|
||||||
|
"id": str(i),
|
||||||
|
"solution": solution
|
||||||
|
}
|
||||||
|
)
|
||||||
|
return solutions
|
||||||
|
|
||||||
|
@staticmethod
|
||||||
|
def answer_word_limit_ok(question):
|
||||||
|
# Check if any option in any solution has more than three words
|
||||||
|
return not any(
|
||||||
|
len(option.split()) > 3
|
||||||
|
for solution in question["solutions"]
|
||||||
|
for option in solution["solution"]
|
||||||
|
)
|
||||||
|
|
||||||
|
@staticmethod
|
||||||
|
def assign_letters_to_paragraphs(paragraphs):
|
||||||
|
result = []
|
||||||
|
letters = iter(string.ascii_uppercase)
|
||||||
|
for paragraph in paragraphs.split("\n\n"):
|
||||||
|
if TextHelper.has_x_words(paragraph, 10):
|
||||||
|
result.append({'paragraph': paragraph.strip(), 'letter': next(letters)})
|
||||||
|
return result
|
||||||
|
|
||||||
|
@staticmethod
|
||||||
|
def contains_empty_dict(arr):
|
||||||
|
return any(elem == {} for elem in arr)
|
||||||
|
|
||||||
|
@staticmethod
|
||||||
|
def fix_writing_overall(overall: float, task_response: dict):
|
||||||
|
grades = [category["grade"] for category in task_response.values()]
|
||||||
|
|
||||||
|
if overall > max(grades) or overall < min(grades):
|
||||||
|
total_sum = sum(grades)
|
||||||
|
average = total_sum / len(grades)
|
||||||
|
rounded_average = round(average, 0)
|
||||||
|
return rounded_average
|
||||||
|
|
||||||
|
return overall
|
||||||
|
|
||||||
|
@staticmethod
|
||||||
|
def build_options(ideas):
|
||||||
|
options = []
|
||||||
|
letters = iter(string.ascii_uppercase)
|
||||||
|
for idea in ideas:
|
||||||
|
options.append({
|
||||||
|
"id": next(letters),
|
||||||
|
"sentence": idea["from"]
|
||||||
|
})
|
||||||
|
return options
|
||||||
|
|
||||||
|
@staticmethod
|
||||||
|
def build_sentences(ideas, start_id):
|
||||||
|
sentences = []
|
||||||
|
letters = iter(string.ascii_uppercase)
|
||||||
|
for idea in ideas:
|
||||||
|
sentences.append({
|
||||||
|
"solution": next(letters),
|
||||||
|
"sentence": idea["idea"]
|
||||||
|
})
|
||||||
|
|
||||||
|
random.shuffle(sentences)
|
||||||
|
for i, sentence in enumerate(sentences, start=start_id):
|
||||||
|
sentence["id"] = i
|
||||||
|
return sentences
|
||||||
|
|
||||||
|
@staticmethod
|
||||||
|
def randomize_mc_options_order(questions):
|
||||||
|
option_ids = ['A', 'B', 'C', 'D']
|
||||||
|
|
||||||
|
for question in questions:
|
||||||
|
# Store the original solution text
|
||||||
|
original_solution_text = next(
|
||||||
|
option['text'] for option in question['options'] if option['id'] == question['solution'])
|
||||||
|
|
||||||
|
# Shuffle the options
|
||||||
|
random.shuffle(question['options'])
|
||||||
|
|
||||||
|
# Update the option ids and find the new solution id
|
||||||
|
for idx, option in enumerate(question['options']):
|
||||||
|
option['id'] = option_ids[idx]
|
||||||
|
if option['text'] == original_solution_text:
|
||||||
|
question['solution'] = option['id']
|
||||||
|
|
||||||
|
return questions
|
||||||
@@ -1,97 +1,119 @@
|
|||||||
import base64
|
import base64
|
||||||
import io
|
import io
|
||||||
import os
|
import os
|
||||||
import shutil
|
import shutil
|
||||||
import subprocess
|
import subprocess
|
||||||
import uuid
|
import uuid
|
||||||
from typing import Optional, Tuple
|
import datetime
|
||||||
|
from pathlib import Path
|
||||||
import numpy as np
|
from typing import Optional, Tuple
|
||||||
import pypandoc
|
|
||||||
from PIL import Image
|
import aiofiles
|
||||||
|
import numpy as np
|
||||||
|
import pypandoc
|
||||||
class FileHelper:
    """Helpers for converting, inspecting and cleaning up files under ./tmp."""

    @staticmethod
    def delete_files_older_than_one_day(directory: str):
        """Delete regular files in *directory* modified more than a day ago.

        Files whose name contains "placeholder" are kept; subdirectories
        are not touched.
        """
        current_time = datetime.datetime.now()

        for entry in os.scandir(directory):
            if entry.is_file():
                file_path = Path(entry)
                file_name = file_path.name
                file_modified_time = datetime.datetime.fromtimestamp(file_path.stat().st_mtime)
                time_difference = current_time - file_modified_time
                if time_difference.days > 1 and "placeholder" not in file_name:
                    file_path.unlink()
                    print(f"Deleted file: {file_path}")

    # Supposedly pandoc covers a wide range of file extensions; only tested with docx.
    @staticmethod
    def convert_file_to_pdf(input_path: str, output_path: str):
        """Convert *input_path* to a small-page PDF via pandoc."""
        pypandoc.convert_file(input_path, 'pdf', outputfile=output_path, extra_args=[
            '-V', 'geometry:paperwidth=5.5in',
            '-V', 'geometry:paperheight=8.5in',
            '-V', 'geometry:margin=0.5in',
            '-V', 'pagestyle=empty'
        ])

    @staticmethod
    def convert_file_to_html(input_path: str, output_path: str):
        """Convert *input_path* to HTML via pandoc."""
        pypandoc.convert_file(input_path, 'html', outputfile=output_path)

    @staticmethod
    def pdf_to_png(path_id: str):
        """Render ./tmp/<path_id>/exercises.pdf into page-*.png via pdftoppm.

        Raises:
            RuntimeError: if the pdftoppm command exits non-zero.
        """
        # Plain string: the previous f-string had no placeholders.
        to_png = "pdftoppm -png exercises.pdf page"
        result = subprocess.run(to_png, shell=True, cwd=f'./tmp/{path_id}', capture_output=True, text=True)
        if result.returncode != 0:
            # RuntimeError subclasses Exception, so existing broad handlers still match.
            raise RuntimeError(
                f"Couldn't convert pdf to png. Failed to run command '{to_png}' -> ```cmd {result.stderr}```")

    @staticmethod
    def is_page_blank(image_bytes: bytes, image_threshold=10) -> bool:
        """Return True when the image has at most *image_threshold* non-white pixels."""
        with Image.open(io.BytesIO(image_bytes)) as img:
            img_gray = img.convert('L')
            img_array = np.array(img_gray)
            non_white_pixels = np.sum(img_array < 255)
        return non_white_pixels <= image_threshold

    @classmethod
    async def _encode_image(cls, image_path: str, image_threshold=10) -> Optional[str]:
        """Read an image file and return it base64-encoded, or None if blank."""
        async with aiofiles.open(image_path, "rb") as image_file:
            image_bytes = await image_file.read()

        if cls.is_page_blank(image_bytes, image_threshold):
            return None

        return base64.b64encode(image_bytes).decode('utf-8')

    @classmethod
    async def b64_pngs(cls, path_id: str, files: list[str]):
        """Build image_url message dicts (data-URI PNGs) for non-blank *files*."""
        png_messages = []
        for filename in files:
            b64_string = await cls._encode_image(os.path.join(f'./tmp/{path_id}', filename))
            if b64_string:
                png_messages.append({
                    "type": "image_url",
                    "image_url": {
                        "url": f"data:image/png;base64,{b64_string}"
                    }
                })
        return png_messages

    @staticmethod
    def remove_directory(path):
        """Best-effort recursive removal of *path*; errors are printed, not raised."""
        try:
            if os.path.exists(path):
                if os.path.isdir(path):
                    shutil.rmtree(path)
        except Exception as e:
            print(f"An error occurred while trying to remove {path}: {str(e)}")

    @staticmethod
    def remove_file(file_path):
        """Best-effort removal of a single file; errors are printed, not raised."""
        try:
            if os.path.exists(file_path):
                if os.path.isfile(file_path):
                    os.remove(file_path)
        except Exception as e:
            print(f"An error occurred while trying to remove the file {file_path}: {str(e)}")

    @staticmethod
    async def save_upload(file: "UploadFile", name: str = "upload", path_id: str = None) -> Tuple[str, str]:
        """Persist an uploaded file under ./tmp/<path_id>/<name>.<ext>.

        Args:
            file: incoming FastAPI upload.
            name: base name for the stored file.
            path_id: reuse an existing tmp directory; a fresh uuid4 is
                generated when omitted.

        Returns:
            (extension, path_id) so callers can locate the stored file.
        """
        ext = file.filename.split('.')[-1]
        path_id = str(uuid.uuid4()) if path_id is None else path_id
        os.makedirs(f'./tmp/{path_id}', exist_ok=True)

        tmp_filename = f'./tmp/{path_id}/{name}.{ext}'
        file_bytes: bytes = await file.read()

        # Distinct name: previously shadowed the `file` parameter.
        async with aiofiles.open(tmp_filename, 'wb') as out_file:
            await out_file.write(file_bytes)

        return ext, path_id
28
app/helpers/text.py
Normal file
28
app/helpers/text.py
Normal file
@@ -0,0 +1,28 @@
|
|||||||
|
from nltk.corpus import words
|
||||||
|
|
||||||
|
|
||||||
|
class TextHelper:
    """Heuristics for deciding whether free text contains real English words."""

    # Lazily-built cache of the nltk word corpus. Previously
    # set(words.words()) was rebuilt on every call, which is expensive
    # (the corpus has on the order of 200k entries).
    _english_vocab = None

    @classmethod
    def _vocab(cls):
        """Return the cached nltk English vocabulary set, building it once."""
        if cls._english_vocab is None:
            cls._english_vocab = set(words.words())
        return cls._english_vocab

    @classmethod
    def has_words(cls, text: str):
        """True when *text* passes the common-word gate and contains at
        least one word from the nltk English corpus."""
        if not cls._has_common_words(text):
            return False
        english_words = cls._vocab()
        words_in_input = text.split()
        return any(word.lower() in english_words for word in words_in_input)

    @classmethod
    def has_x_words(cls, text: str, quantity):
        """True when *text* passes the common-word gate and contains at
        least *quantity* words from the nltk English corpus."""
        if not cls._has_common_words(text):
            return False
        english_words = cls._vocab()
        words_in_input = text.split()
        english_word_count = sum(1 for word in words_in_input if word.lower() in english_words)
        return english_word_count >= quantity

    @staticmethod
    def _has_common_words(text: str):
        """Cheap pre-filter: require at least 10 occurrences of very common
        English function words before paying for the full corpus lookup."""
        english_words = {"the", "be", "to", "of", "and", "a", "in", "that", "have", "i"}
        words_in_input = text.split()
        english_word_count = sum(1 for word in words_in_input if word.lower() in english_words)
        return english_word_count >= 10
||||||
@@ -1,88 +1,89 @@
|
|||||||
# This is a work in progress. There are still bugs. Once it is production-ready this will become a full repo.
|
# This is a work in progress. There are still bugs. Once it is production-ready this will become a full repo.
|
||||||
|
|
||||||
|
import tiktoken
|
||||||
def count_tokens(text, model_name="gpt-3.5-turbo", debug=False):
    """
    Count the number of tokens in a given text string without using the OpenAI API.

    Three strategies are attempted in order:
    1. tiktoken (preferred): accurate counting matching the OpenAI tokenizer.
    2. nltk: word tokenization via the Natural Language Toolkit.
    3. split: simple whitespace-based counting as a fallback.

    Parameters
    ----------
    text : str
        The text string for which you want to count tokens.
    model_name : str, optional
        The OpenAI model for which you want to count tokens (default: "gpt-3.5-turbo").
    debug : bool, optional
        Set to True to print error messages (default: False).

    Returns
    -------
    dict
        {"n_tokens": <int>, "method": "tiktoken" | "nltk" | "split"}
    """
    # Preferred path: tiktoken mirrors OpenAI's own tokenizer.
    try:
        token_count = len(tiktoken.encoding_for_model(model_name).encode(text))
        return {"n_tokens": token_count, "method": "tiktoken"}
    except Exception as e:
        if debug:
            print(f"Error using tiktoken: {e}")

    # Second choice: NLTK word tokenizer.
    # Passed nltk.download("punkt") to server.py's @asynccontextmanager
    try:
        return {"n_tokens": len(nltk.word_tokenize(text)), "method": "nltk"}
    except Exception as e:
        if debug:
            print(f"Error using nltk: {e}")

    # Last resort: naive whitespace split.
    return {"n_tokens": len(text.split()), "method": "split"}
|
||||||
|
|
||||||
class TokenBuffer:
    """Sliding-window text buffer capped at a token budget.

    Appended chunks are tracked together with their token counts; once the
    running total exceeds ``max_tokens``, the oldest chunks are evicted and
    their words trimmed from the front of the buffer.
    """

    def __init__(self, max_tokens=2048):
        self.max_tokens = max_tokens
        self.buffer = ""
        self.token_lengths = []
        self.token_count = 0

    def update(self, text, model_name="gpt-3.5-turbo", debug=False):
        """Append *text*, then evict oldest chunks while over budget."""
        chunk_tokens = count_tokens(text, model_name=model_name, debug=debug)["n_tokens"]
        self.buffer += text
        self.token_lengths.append(chunk_tokens)
        self.token_count += chunk_tokens

        while self.token_count > self.max_tokens:
            evicted = self.token_lengths.pop(0)
            self.token_count -= evicted
            # NOTE(review): trimming drops one space-separated word per evicted
            # token — approximate; confirm it matches how chunks tokenize.
            self.buffer = self.buffer.split(" ", evicted)[-1]

    def get_buffer(self):
        """Return the current buffered text."""
        return self.buffer
|
||||||
5
app/mappers/__init__.py
Normal file
5
app/mappers/__init__.py
Normal file
@@ -0,0 +1,5 @@
|
|||||||
|
from .level import LevelMapper
|
||||||
|
|
||||||
|
__all__ = [
|
||||||
|
"LevelMapper"
|
||||||
|
]
|
||||||
@@ -1,66 +1,66 @@
|
|||||||
from typing import Dict, Any
|
from typing import Dict, Any
|
||||||
|
|
||||||
from pydantic import ValidationError
|
from pydantic import ValidationError
|
||||||
|
|
||||||
from modules.upload_level.exam_dtos import (
|
from app.dtos.exams.level import (
|
||||||
MultipleChoiceExercise,
|
MultipleChoiceExercise,
|
||||||
FillBlanksExercise,
|
FillBlanksExercise,
|
||||||
Part, Exam
|
Part, Exam
|
||||||
)
|
)
|
||||||
from modules.upload_level.sheet_dtos import Sheet, Option, MultipleChoiceQuestion, FillBlanksWord
|
from app.dtos.sheet import Sheet, Option, MultipleChoiceQuestion, FillBlanksWord
|
||||||
|
|
||||||
|
|
||||||
class LevelMapper:
    """Maps raw level-exam response dicts onto typed DTO models."""

    @staticmethod
    def map_to_exam_model(response: Dict[str, Any]) -> Exam:
        """Build an Exam from a raw response dict.

        Raises:
            ValueError: when an exercise carries an unrecognised "type".
                (Previously pydantic's ValidationError was raised with a bare
                message — ValidationError cannot be constructed that way, so
                the raise itself failed with a TypeError.)
        """
        parts = []
        for part in response['parts']:
            part_exercises = part['exercises']
            context = part.get('context', None)

            exercises = []
            for exercise in part_exercises:
                exercise_type = exercise['type']
                if exercise_type == 'multipleChoice':
                    exercise_model = MultipleChoiceExercise(**exercise)
                elif exercise_type == 'fillBlanks':
                    exercise_model = FillBlanksExercise(**exercise)
                else:
                    raise ValueError(f"Unknown exercise type: {exercise_type}")

                exercises.append(exercise_model)

            # "context" is optional; only pass it when present.
            part_kwargs = {"exercises": exercises}
            if context is not None:
                part_kwargs["context"] = context

            parts.append(Part(**part_kwargs))

        return Exam(parts=parts)

    @staticmethod
    def map_to_sheet(response: Dict[str, Any]) -> Sheet:
        """Build a Sheet from a raw response dict.

        Known component types are converted to typed models; unknown
        components are passed through unchanged.
        """
        components = []

        for item in response["components"]:
            component_type = item["type"]

            if component_type == "multipleChoice":
                options = [Option(id=opt["id"], text=opt["text"]) for opt in item["options"]]
                components.append(MultipleChoiceQuestion(
                    id=item["id"],
                    prompt=item["prompt"],
                    variant=item.get("variant", "text"),
                    options=options
                ))
            elif component_type == "fillBlanks":
                components.append(FillBlanksWord(
                    id=item["id"],
                    options=item["options"]
                ))
            else:
                components.append(item)

        return Sheet(components=components)
||||||
39
app/mappers/reading.py
Normal file
39
app/mappers/reading.py
Normal file
@@ -0,0 +1,39 @@
|
|||||||
|
from typing import Dict, Any
|
||||||
|
|
||||||
|
from app.dtos.exams.reading import (
|
||||||
|
Part, Exam, Context, FillBlanksExercise,
|
||||||
|
TrueFalseExercise, MatchSentencesExercise,
|
||||||
|
WriteBlanksExercise
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
class ReadingMapper:
    """Maps raw reading-exam response dicts onto typed DTO models."""

    @staticmethod
    def map_to_exam_model(response: Dict[str, Any]) -> Exam:
        """Build an Exam (with its minute timer) from a raw response dict.

        Raises:
            KeyError: when an exercise "type" has no registered model.
        """
        # Discriminator -> DTO model; invariant, so built once rather than
        # rebuilt inside the per-part loop as before.
        model_map = {
            'fillBlanks': FillBlanksExercise,
            'trueFalse': TrueFalseExercise,
            'matchSentences': MatchSentencesExercise,
            'writeBlanks': WriteBlanksExercise
        }

        parts = []
        for part in response['parts']:
            context = Context(**part['text'])

            exercises = []
            for exercise in part['exercises']:
                exercise_type = exercise['type']
                exercises.append(model_map[exercise_type](**exercise))

            part_kwargs = {
                "exercises": exercises,
                "text": context
            }

            parts.append(Part(**part_kwargs))

        return Exam(parts=parts, minTimer=response["minTimer"])
|
||||||
9
app/middlewares/__init__.py
Normal file
9
app/middlewares/__init__.py
Normal file
@@ -0,0 +1,9 @@
|
|||||||
|
from .authentication import AuthBackend, AuthenticationMiddleware
|
||||||
|
from .authorization import Authorized, IsAuthenticatedViaBearerToken
|
||||||
|
|
||||||
|
__all__ = [
|
||||||
|
"AuthBackend",
|
||||||
|
"AuthenticationMiddleware",
|
||||||
|
"Authorized",
|
||||||
|
"IsAuthenticatedViaBearerToken"
|
||||||
|
]
|
||||||
48
app/middlewares/authentication.py
Normal file
48
app/middlewares/authentication.py
Normal file
@@ -0,0 +1,48 @@
|
|||||||
|
import os
|
||||||
|
from typing import Tuple
|
||||||
|
|
||||||
|
import jwt
|
||||||
|
from jwt import InvalidTokenError
|
||||||
|
from pydantic import BaseModel, Field
|
||||||
|
from starlette.authentication import AuthenticationBackend
|
||||||
|
from starlette.middleware.authentication import (
|
||||||
|
AuthenticationMiddleware as BaseAuthenticationMiddleware,
|
||||||
|
)
|
||||||
|
from starlette.requests import HTTPConnection
|
||||||
|
|
||||||
|
|
||||||
|
class Session(BaseModel):
    """Per-request authentication state produced by AuthBackend."""

    # Set to True only after the bearer JWT validates successfully.
    authenticated: bool = Field(default=False, description="Is user authenticated?")
|
||||||
|
|
||||||
|
|
||||||
|
class AuthBackend(AuthenticationBackend):
    """Starlette backend validating an ``Authorization: Bearer <jwt>`` header.

    ``authenticate`` never raises for bad credentials: any failure yields
    ``(False, Session())``; success yields ``(True, Session)`` with
    ``authenticated=True``. The JWT is verified as HS256 against the
    ``JWT_SECRET_KEY`` environment variable.
    """

    async def authenticate(
        self, conn: HTTPConnection
    ) -> Tuple[bool, Session]:
        session = Session()

        header_value: str = conn.headers.get("Authorization")
        if not header_value:
            return False, session

        # Header must be exactly "<scheme> <token>".
        try:
            scheme, token = header_value.split(" ")
        except ValueError:
            return False, session
        if scheme.lower() != "bearer":
            return False, session

        jwt_secret_key = os.getenv("JWT_SECRET_KEY")
        if not jwt_secret_key:
            return False, session

        try:
            jwt.decode(token, jwt_secret_key, algorithms=["HS256"])
        except InvalidTokenError:
            return False, session

        session.authenticated = True
        return True, session
||||||
|
|
||||||
|
|
||||||
|
class AuthenticationMiddleware(BaseAuthenticationMiddleware):
    """Project-local alias of Starlette's authentication middleware.

    Exists so application code imports the middleware from app.middlewares
    instead of depending on starlette directly; no behavior is added.
    """
|
||||||
36
app/middlewares/authorization.py
Normal file
36
app/middlewares/authorization.py
Normal file
@@ -0,0 +1,36 @@
|
|||||||
|
from abc import ABC, abstractmethod
|
||||||
|
from typing import List, Type
|
||||||
|
|
||||||
|
from fastapi import Request
|
||||||
|
from fastapi.openapi.models import APIKey, APIKeyIn
|
||||||
|
from fastapi.security.base import SecurityBase
|
||||||
|
|
||||||
|
from app.exceptions import CustomException, UnauthorizedException
|
||||||
|
|
||||||
|
|
||||||
|
class BaseAuthorization(ABC):
    """Base class for permission checks used by the Authorized dependency.

    Subclasses override ``exception`` with the error to raise on denial and
    implement ``has_permission``.
    """

    # Raised by Authorized.__call__ when has_permission returns False.
    exception = CustomException

    @abstractmethod
    async def has_permission(self, request: Request) -> bool:
        """Return True when *request* is allowed to proceed."""
|
||||||
|
|
||||||
|
|
||||||
|
class IsAuthenticatedViaBearerToken(BaseAuthorization):
    """Permission: the request's user was authenticated by the auth backend."""

    exception = UnauthorizedException

    async def has_permission(self, request: Request) -> bool:
        """True when the authentication middleware marked the user authenticated."""
        return request.user.authenticated
|
||||||
|
|
||||||
|
|
||||||
|
class Authorized(SecurityBase):
    """FastAPI security dependency that runs a list of permission checks.

    Each permission class is instantiated per request; the first failing
    check aborts the request by raising that permission's ``exception``.
    Subclassing SecurityBase makes the Authorization header appear in the
    OpenAPI docs as an API-key scheme.
    """

    def __init__(self, permissions: List[Type[BaseAuthorization]]):
        self.permissions = permissions
        # "in" is a Python keyword, hence the dict-splat construction.
        self.model: APIKey = APIKey(**{"in": APIKeyIn.header}, name="Authorization")
        self.scheme_name = self.__class__.__name__

    async def __call__(self, request: Request):
        for permission_cls in self.permissions:
            checker = permission_cls()
            if not await checker.has_permission(request=request):
                raise checker.exception
|
||||||
0
app/repositories/__init__.py
Normal file
0
app/repositories/__init__.py
Normal file
7
app/repositories/abc/__init__.py
Normal file
7
app/repositories/abc/__init__.py
Normal file
@@ -0,0 +1,7 @@
|
|||||||
|
from .file_storage import IFileStorage
|
||||||
|
from .document_store import IDocumentStore
|
||||||
|
|
||||||
|
__all__ = [
|
||||||
|
"IFileStorage",
|
||||||
|
"IDocumentStore"
|
||||||
|
]
|
||||||
18
app/repositories/abc/document_store.py
Normal file
18
app/repositories/abc/document_store.py
Normal file
@@ -0,0 +1,18 @@
|
|||||||
|
from abc import ABC
|
||||||
|
|
||||||
|
from typing import Dict, Optional, List
|
||||||
|
|
||||||
|
|
||||||
|
class IDocumentStore(ABC):
    """Abstract async document store (implemented by Firestore / MongoDB).

    NOTE(review): unlike IFileStorage, these methods are not decorated with
    @abstractmethod, so an incomplete subclass instantiates fine and its
    missing methods silently return None — confirm whether that is intended.
    """

    async def save_to_db(self, collection: str, item: Dict, doc_id: Optional[str] = None) -> Optional[str]:
        """Insert *item*; return the stored document's id, or None on failure."""

    async def get_doc_by_id(self, collection: str, doc_id: str) -> Optional[Dict]:
        """Return the document identified by *doc_id*, or None when absent."""

    async def find(self, collection: str, query: Optional[Dict]) -> List[Dict]:
        """Return documents from *collection*; filter semantics are defined
        by the concrete store."""

    async def update(self, collection: str, filter_query: Dict, update: Dict) -> Optional[str]:
        """Apply *update* to documents matching *filter_query*."""
||||||
16
app/repositories/abc/file_storage.py
Normal file
16
app/repositories/abc/file_storage.py
Normal file
@@ -0,0 +1,16 @@
|
|||||||
|
from abc import ABC, abstractmethod
|
||||||
|
|
||||||
|
|
||||||
|
class IFileStorage(ABC):
    """Abstract async file storage (implemented by FirebaseStorage)."""

    @abstractmethod
    async def download_firebase_file(self, source_blob_name, destination_file_name):
        """Download *source_blob_name* to the local *destination_file_name*."""

    @abstractmethod
    async def upload_file_firebase_get_url(self, destination_blob_name, source_file_name):
        """Upload a local file and return its storage URL."""

    @abstractmethod
    async def make_public(self, blob_name: str):
        """Grant public read access to *blob_name*."""
|
||||||
8
app/repositories/impl/__init__.py
Normal file
8
app/repositories/impl/__init__.py
Normal file
@@ -0,0 +1,8 @@
|
|||||||
|
from .document_stores import *
|
||||||
|
from app.repositories.impl.file_storage.firebase import FirebaseStorage
|
||||||
|
|
||||||
|
__all__ = [
|
||||||
|
"FirebaseStorage"
|
||||||
|
]
|
||||||
|
|
||||||
|
__all__.extend(document_stores.__all__)
|
||||||
7
app/repositories/impl/document_stores/__init__.py
Normal file
7
app/repositories/impl/document_stores/__init__.py
Normal file
@@ -0,0 +1,7 @@
|
|||||||
|
from .firestore import Firestore
|
||||||
|
#from .mongo import MongoDB
|
||||||
|
|
||||||
|
__all__ = [
|
||||||
|
"Firestore",
|
||||||
|
#"MongoDB"
|
||||||
|
]
|
||||||
50
app/repositories/impl/document_stores/firestore.py
Normal file
50
app/repositories/impl/document_stores/firestore.py
Normal file
@@ -0,0 +1,50 @@
|
|||||||
|
import logging
|
||||||
|
from typing import Optional, List, Dict
|
||||||
|
|
||||||
|
from google.cloud.firestore_v1.async_client import AsyncClient
|
||||||
|
from google.cloud.firestore_v1.async_collection import AsyncCollectionReference
|
||||||
|
from google.cloud.firestore_v1.async_document import AsyncDocumentReference
|
||||||
|
from app.repositories.abc import IDocumentStore
|
||||||
|
|
||||||
|
|
||||||
|
class Firestore(IDocumentStore):
    """Async Firestore-backed document store."""

    def __init__(self, client: AsyncClient):
        self._client = client
        self._logger = logging.getLogger(__name__)

    async def save_to_db(self, collection: str, item, doc_id: Optional[str] = None) -> Optional[str]:
        """Save *item*, using *doc_id* as the document key when provided.

        Returns the document id on success, otherwise None.
        """
        collection_ref: AsyncCollectionReference = self._client.collection(collection)

        if doc_id:
            document_ref: AsyncDocumentReference = collection_ref.document(doc_id)
            await document_ref.set(item)
            # Read back to confirm the write landed before reporting success.
            doc_snapshot = await document_ref.get()
            if doc_snapshot.exists:
                self._logger.info(f"Document added with ID: {document_ref.id}")
                return document_ref.id
        else:
            update_time, document_ref = await collection_ref.add(item)
            if document_ref:
                self._logger.info(f"Document added with ID: {document_ref.id}")
                return document_ref.id

        return None

    async def find(self, collection: str, query: Optional[Dict] = None) -> List[Dict]:
        """Stream every document in *collection* as dicts.

        NOTE(review): the *query* argument is currently ignored — all
        documents are returned regardless of the filter.
        """
        collection_ref: AsyncCollectionReference = self._client.collection(collection)
        docs = []
        async for doc in collection_ref.stream():
            docs.append(doc.to_dict())
        return docs

    async def get_doc_by_id(self, collection: str, doc_id: str) -> Optional[Dict]:
        """Fetch a single document as a dict, or None when it does not exist."""
        collection_ref: AsyncCollectionReference = self._client.collection(collection)
        doc_ref: AsyncDocumentReference = collection_ref.document(doc_id)
        doc = await doc_ref.get()

        if doc.exists:
            return doc.to_dict()
        return None

    async def update(self, collection: str, filter_query: Dict, update: Dict) -> Optional[str]:
        """Not supported by this store.

        Raises:
            NotImplementedError: always. (Previously ``raise NotImplemented()``
                — the NotImplemented sentinel is not callable, so that line
                raised TypeError instead of the intended exception.)
        """
        raise NotImplementedError()
|
||||||
41
app/repositories/impl/document_stores/mongo.py
Normal file
41
app/repositories/impl/document_stores/mongo.py
Normal file
@@ -0,0 +1,41 @@
|
|||||||
|
import logging
|
||||||
|
import uuid
|
||||||
|
from typing import Optional, List, Dict
|
||||||
|
|
||||||
|
from motor.motor_asyncio import AsyncIOMotorDatabase
|
||||||
|
|
||||||
|
from app.repositories.abc import IDocumentStore
|
||||||
|
|
||||||
|
|
||||||
|
class MongoDB(IDocumentStore):
    """Async MongoDB-backed document store (motor driver).

    Documents carry an application-level "id" field (a uuid4 string) in
    addition to Mongo's own ``_id``; lookups go through "id".
    """

    def __init__(self, mongo_db: AsyncIOMotorDatabase):
        self._mongo_db = mongo_db
        self._logger = logging.getLogger(__name__)

    async def save_to_db(self, collection: str, item, doc_id: Optional[str] = None) -> Optional[str]:
        """Insert *item*, assigning a fresh uuid4 "id" unless *doc_id* is given.

        Returns the application-level id on success, otherwise None.
        Note: *item* is mutated — its "id" key is set before insertion.
        """
        target = self._mongo_db[collection]

        if doc_id is None:
            doc_id = str(uuid.uuid4())
        item['id'] = doc_id

        result = await target.insert_one(item)
        if result.inserted_id:
            # returning id instead of _id
            self._logger.info(f"Document added with ID: {doc_id}")
            return doc_id

        return None

    async def find(self, collection: str, query: Optional[Dict] = None) -> List[Dict]:
        """Return all documents matching *query* (everything when falsy)."""
        cursor = self._mongo_db[collection].find(query or {})
        return [document async for document in cursor]

    async def update(self, collection: str, filter_query: Dict, update: Dict) -> Optional[str]:
        """Apply *update* to the first matching document; returns any upserted id."""
        result = await self._mongo_db[collection].update_one(filter_query, update)
        return result.upserted_id

    async def get_doc_by_id(self, collection: str, doc_id: str) -> Optional[Dict]:
        """Look up a document by its application-level "id"."""
        return await self._mongo_db[collection].find_one({"id": doc_id})
|
||||||
5
app/repositories/impl/file_storage/__init__.py
Normal file
5
app/repositories/impl/file_storage/__init__.py
Normal file
@@ -0,0 +1,5 @@
|
|||||||
|
from .firebase import FirebaseStorage
|
||||||
|
|
||||||
|
__all__ = [
|
||||||
|
"FirebaseStorage"
|
||||||
|
]
|
||||||
83
app/repositories/impl/file_storage/firebase.py
Normal file
83
app/repositories/impl/file_storage/firebase.py
Normal file
@@ -0,0 +1,83 @@
|
|||||||
|
import logging
|
||||||
|
from typing import Optional
|
||||||
|
|
||||||
|
import aiofiles
|
||||||
|
from httpx import AsyncClient
|
||||||
|
|
||||||
|
from app.repositories.abc import IFileStorage
|
||||||
|
|
||||||
|
|
||||||
|
class FirebaseStorage(IFileStorage):
    """Firebase Cloud Storage repository implemented over the public REST API.

    All HTTP calls go through a shared ``httpx.AsyncClient``, and local file
    access uses ``aiofiles`` so the event loop is never blocked.
    """

    def __init__(self, client: AsyncClient, token: str, bucket: str):
        """
        Args:
            client: shared async HTTP client, owned (and closed) by the caller.
            token: Firebase auth token sent in the Authorization header.
            bucket: storage bucket name, e.g. ``my-app.appspot.com``.
        """
        self._httpx_client = client
        self._token = token
        self._storage_url = f'https://firebasestorage.googleapis.com/v0/b/{bucket}'
        self._logger = logging.getLogger(__name__)

    async def download_firebase_file(self, source_blob_name: str, destination_file_name: str) -> Optional[str]:
        """Download a blob into a local file.

        Returns:
            The local file path on success, ``None`` on any HTTP failure.
        """
        # Firebase object paths embed '/' as %2F inside the URL path segment.
        source_blob_name = source_blob_name.replace('/', '%2F')
        download_url = f"{self._storage_url}/o/{source_blob_name}?alt=media"

        response = await self._httpx_client.get(
            download_url,
            headers={'Authorization': f'Firebase {self._token}'}
        )

        if response.status_code == 200:
            async with aiofiles.open(destination_file_name, 'wb') as file:
                await file.write(response.content)
            self._logger.info("File downloaded to %s", destination_file_name)
            return destination_file_name
        else:
            self._logger.error(
                "Failed to download blob %s. %s - %s",
                source_blob_name, response.status_code, response.content
            )
            return None

    async def upload_file_firebase_get_url(self, destination_blob_name: str, source_file_name: str) -> Optional[str]:
        """Upload a local file and return its storage object URL.

        Returns:
            The object URL on success, ``None`` on any HTTP failure.

        NOTE(review): the returned URL has no ``?alt=media`` suffix, so it
        addresses the object's metadata rather than the raw content —
        confirm this is what callers expect.
        """
        destination_blob_name = destination_blob_name.replace('/', '%2F')
        upload_url = f"{self._storage_url}/o/{destination_blob_name}"

        async with aiofiles.open(source_file_name, 'rb') as file:
            file_bytes = await file.read()

        response = await self._httpx_client.post(
            upload_url,
            headers={
                'Authorization': f'Firebase {self._token}',
                "X-Goog-Upload-Protocol": "multipart"
            },
            files={
                'metadata': (None, '{"metadata":{"test":"testMetadata"}}', 'application/json'),
                'file': file_bytes
            }
        )

        if response.status_code == 200:
            self._logger.info(
                "File %s uploaded to %s/o/%s.",
                source_file_name, self._storage_url, destination_blob_name
            )

            # TODO: Test this
            #await self.make_public(destination_blob_name)

            file_url = f"{self._storage_url}/o/{destination_blob_name}"
            return file_url
        else:
            self._logger.error(
                "Failed to upload file %s. Error: %s - %s",
                source_file_name, response.status_code, str(response.content)
            )
            return None

    async def make_public(self, destination_blob_name: str):
        """Grant ``allUsers`` READER access to a blob via the ACL endpoint."""
        # Encode '/' exactly like the other endpoints do (fixes an
        # inconsistency; idempotent for names that are already encoded).
        destination_blob_name = destination_blob_name.replace('/', '%2F')
        acl_url = f"{self._storage_url}/o/{destination_blob_name}/acl"
        acl = {'entity': 'allUsers', 'role': 'READER'}

        # NOTE(review): this endpoint sends 'Bearer' while upload/download
        # send 'Firebase' — confirm the stored token is valid as an OAuth
        # bearer token for the ACL API.
        response = await self._httpx_client.post(
            acl_url,
            headers={
                'Authorization': f'Bearer {self._token}',
                'Content-Type': 'application/json'
            },
            json=acl
        )

        if response.status_code == 200:
            self._logger.info("Blob %s is now public.", destination_blob_name)
        else:
            self._logger.error(
                "Failed to make blob %s public. %s - %s",
                destination_blob_name, response.status_code, response.content
            )
|
||||||
156
app/server.py
Normal file
156
app/server.py
Normal file
@@ -0,0 +1,156 @@
|
|||||||
|
import json
|
||||||
|
import os
|
||||||
|
import pathlib
|
||||||
|
import logging.config
|
||||||
|
import logging.handlers
|
||||||
|
|
||||||
|
import aioboto3
|
||||||
|
import contextlib
|
||||||
|
from contextlib import asynccontextmanager
|
||||||
|
from collections import defaultdict
|
||||||
|
from typing import List
|
||||||
|
from http import HTTPStatus
|
||||||
|
|
||||||
|
import httpx
|
||||||
|
import whisper
|
||||||
|
from fastapi import FastAPI, Request
|
||||||
|
from fastapi.encoders import jsonable_encoder
|
||||||
|
from fastapi.exceptions import RequestValidationError
|
||||||
|
from fastapi.middleware import Middleware
|
||||||
|
from fastapi.middleware.cors import CORSMiddleware
|
||||||
|
from fastapi.responses import JSONResponse
|
||||||
|
|
||||||
|
import nltk
|
||||||
|
from starlette import status
|
||||||
|
|
||||||
|
from app.api import router
|
||||||
|
from app.configs import DependencyInjector
|
||||||
|
from app.exceptions import CustomException
|
||||||
|
from app.middlewares import AuthenticationMiddleware, AuthBackend
|
||||||
|
|
||||||
|
|
||||||
|
@asynccontextmanager
async def lifespan(_app: FastAPI):
    """
    Startup and Shutdown logic is in this lifespan method

    https://fastapi.tiangolo.com/advanced/events/
    """
    # Whisper model (loaded once at startup; this call is slow and blocking).
    whisper_model = whisper.load_model("base")

    # NLTK required datasets download
    nltk.download('words')
    nltk.download("punkt")

    # AWS Polly client instantiation — the exit stack owns its lifetime,
    # so no separate polly_client.close() is needed at shutdown.
    context_stack = contextlib.AsyncExitStack()
    session = aioboto3.Session()
    polly_client = await context_stack.enter_async_context(
        session.client(
            'polly',
            region_name='eu-west-1',
            aws_secret_access_key=os.getenv("AWS_SECRET_ACCESS_KEY"),
            aws_access_key_id=os.getenv("AWS_ACCESS_KEY_ID")
        )
    )

    http_client = httpx.AsyncClient()

    # Wire the shared singletons into the DI container.
    DependencyInjector(
        polly_client,
        http_client,
        whisper_model
    ).inject()

    # Setup logging
    config_file = pathlib.Path("./app/configs/logging/logging_config.json")
    with open(config_file) as f_in:
        config = json.load(f_in)

    logging.config.dictConfig(config)

    try:
        yield
    finally:
        # Run shutdown even if the application body raised; the exit stack
        # closes the Polly client.
        await http_client.aclose()
        await context_stack.aclose()
|
||||||
|
|
||||||
|
|
||||||
|
def setup_listeners(_app: FastAPI) -> None:
    """Register the application's exception handlers on *_app*."""
    @_app.exception_handler(RequestValidationError)
    async def custom_form_validation_error(request, exc):
        """
        Map pydantic validation errors to a flat {field: [messages]} payload.

        Don't delete request param
        """
        reformatted_message = defaultdict(list)
        for pydantic_error in exc.errors():
            loc, msg = pydantic_error["loc"], pydantic_error["msg"]
            # Drop the leading location segment ("body"/"query"/"path") so
            # clients see only the field path.
            filtered_loc = loc[1:] if loc[0] in ("body", "query", "path") else loc
            field_string = ".".join(filtered_loc)
            # A missing/invalid refresh-token cookie is treated as an auth
            # failure, not a validation error.
            if field_string == "cookie.refresh_token":
                return JSONResponse(
                    status_code=401,
                    content={"error_code": 401, "message": HTTPStatus.UNAUTHORIZED.description},
                )
            reformatted_message[field_string].append(msg)

        return JSONResponse(
            status_code=status.HTTP_400_BAD_REQUEST,
            content=jsonable_encoder(
                {"details": "Invalid request!", "errors": reformatted_message}
            ),
        )

    @_app.exception_handler(CustomException)
    async def custom_exception_handler(request: Request, exc: CustomException):
        """
        Translate domain CustomExceptions into their configured JSON response.

        Don't delete request param
        """
        return JSONResponse(
            status_code=exc.code,
            content={"error_code": exc.error_code, "message": exc.message},
        )

    @_app.exception_handler(Exception)
    async def default_exception_handler(request: Request, exc: Exception):
        """
        Last-resort handler for otherwise-uncaught exceptions.

        Don't delete request param
        """
        # NOTE(review): str(exc) is returned verbatim as the JSON body,
        # which leaks internal error details to clients and differs in
        # shape from the other handlers — confirm this is intended.
        return JSONResponse(
            status_code=500,
            content=str(exc),
        )
|
||||||
|
|
||||||
|
|
||||||
|
def setup_middleware() -> List[Middleware]:
    """Build the middleware stack: permissive CORS plus token authentication."""
    cors = Middleware(
        CORSMiddleware,
        allow_origins=["*"],
        allow_credentials=True,
        allow_methods=["*"],
        allow_headers=["*"],
    )
    auth = Middleware(
        AuthenticationMiddleware,
        backend=AuthBackend(),
    )
    return [cors, auth]
|
||||||
|
|
||||||
|
|
||||||
|
def create_app() -> FastAPI:
    """Application factory: configure FastAPI, middleware, routes, handlers."""
    in_production = os.getenv("ENV") == "production"
    application = FastAPI(
        # Hide the interactive docs outside of development environments.
        docs_url=None if in_production else "/docs",
        redoc_url=None if in_production else "/redoc",
        middleware=setup_middleware(),
        lifespan=lifespan,
    )
    application.include_router(router)
    setup_listeners(application)
    return application
|
||||||
|
|
||||||
|
|
||||||
|
# Module-level ASGI application object (what the server process imports).
app = create_app()
|
||||||
0
app/services/__init__.py
Normal file
0
app/services/__init__.py
Normal file
11
app/services/abc/__init__.py
Normal file
11
app/services/abc/__init__.py
Normal file
@@ -0,0 +1,11 @@
|
|||||||
|
"""Service-layer abstract interfaces (aggregated public re-exports)."""
from .third_parties import *
from .exam import *
from .training import *
from .user import IUserService

__all__ = [
    "IUserService"
]
# NOTE: the star-imports above also bind the submodule names
# (third_parties, exam, training) in this package namespace, which is
# what makes the attribute accesses below resolve.
__all__.extend(third_parties.__all__)
__all__.extend(exam.__all__)
__all__.extend(training.__all__)
|
||||||
17
app/services/abc/exam/__init__.py
Normal file
17
app/services/abc/exam/__init__.py
Normal file
@@ -0,0 +1,17 @@
|
|||||||
|
"""Abstract interfaces for the exam domain services."""
from .level import ILevelService
from .listening import IListeningService
from .writing import IWritingService
from .speaking import ISpeakingService
from .reading import IReadingService
from .grade import IGradeService
from .exercises import IExerciseService

__all__ = [
    "ILevelService",
    "IListeningService",
    "IWritingService",
    "ISpeakingService",
    "IReadingService",
    "IGradeService",
    "IExerciseService"
]
|
||||||
33
app/services/abc/exam/exercises.py
Normal file
33
app/services/abc/exam/exercises.py
Normal file
@@ -0,0 +1,33 @@
|
|||||||
|
from abc import ABC, abstractmethod
|
||||||
|
from typing import Dict, Any
|
||||||
|
|
||||||
|
|
||||||
|
class IExerciseService(ABC):
    """Contract for generating each supported exam exercise type.

    Every generator receives the raw request *args* plus the exercise id to
    assign, and returns the built exercise as a dict.
    """

    @abstractmethod
    async def generate_multiple_choice(self, args: Dict, exercise_id: int) -> Dict[str, Any]:
        """Produce one multiple-choice exercise payload."""

    @abstractmethod
    async def generate_blank_space_text(self, args: Dict, exercise_id: int) -> Dict[str, Any]:
        """Produce one fill-in-the-blank text exercise payload."""

    @abstractmethod
    async def generate_reading_passage_utas(self, args: Dict, exercise_id: int) -> Dict[str, Any]:
        """Produce one UTAS reading-passage exercise payload."""

    @abstractmethod
    async def generate_writing_task(self, args: Dict, exercise_id: int) -> Dict[str, Any]:
        """Produce one writing-task payload."""

    @abstractmethod
    async def generate_speaking_task(self, args: Dict, exercise_id: int) -> Dict[str, Any]:
        """Produce one speaking-task payload."""

    @abstractmethod
    async def generate_reading_task(self, args: Dict, exercise_id: int) -> Dict[str, Any]:
        """Produce one reading-task payload."""

    @abstractmethod
    async def generate_listening_task(self, args: Dict, exercise_id: int) -> Dict[str, Any]:
        """Produce one listening-task payload."""
|
||||||
13
app/services/abc/exam/grade.py
Normal file
13
app/services/abc/exam/grade.py
Normal file
@@ -0,0 +1,13 @@
|
|||||||
|
from abc import ABC, abstractmethod
|
||||||
|
from typing import Dict, List
|
||||||
|
|
||||||
|
|
||||||
|
class IGradeService(ABC):
    """Contract for grading answers and summarising results."""

    @abstractmethod
    async def grade_short_answers(self, data: Dict):
        """Grade the batch of short answers described by *data*."""

    @abstractmethod
    async def calculate_grading_summary(self, extracted_sections: List):
        """Aggregate per-section results into an overall grading summary."""
|
||||||
51
app/services/abc/exam/level.py
Normal file
51
app/services/abc/exam/level.py
Normal file
@@ -0,0 +1,51 @@
|
|||||||
|
from abc import ABC, abstractmethod
|
||||||
|
import random
|
||||||
|
|
||||||
|
from typing import Dict, Optional
|
||||||
|
|
||||||
|
from fastapi import UploadFile
|
||||||
|
|
||||||
|
from app.configs.constants import EducationalContent
|
||||||
|
|
||||||
|
|
||||||
|
class ILevelService(ABC):
    """Contract for building and serving level-assessment exams."""

    @abstractmethod
    async def generate_exercises(self, dto):
        """Generate the exercises requested by *dto*."""

    @abstractmethod
    async def get_level_exam(
        self, number_of_exercises: int = 25, min_timer: int = 25, diagnostic: bool = False
    ) -> Dict:
        """Assemble a level exam with the given size and timer settings."""

    @abstractmethod
    async def get_level_utas(self):
        """Return the UTAS variant of the level exam."""

    @abstractmethod
    async def get_custom_level(self, data: Dict):
        """Build a custom level exam from *data*."""

    @abstractmethod
    async def upload_level(self, upload: UploadFile) -> Dict:
        """Import a level exam from an uploaded file."""

    @abstractmethod
    async def gen_multiple_choice(
        self, mc_variant: str, quantity: int, start_id: int = 1  # , *, utas: bool = False, all_exams=None
    ):
        """Generate *quantity* multiple-choice exercises of *mc_variant*."""

    @abstractmethod
    async def gen_blank_space_text_utas(
        self, quantity: int, start_id: int, size: int, topic: str
    ):
        """Generate UTAS fill-in-the-blank texts on *topic*."""

    @abstractmethod
    async def gen_reading_passage_utas(
        self, start_id, mc_quantity: int, topic: Optional[str]  # sa_quantity: int,
    ):
        """Generate a UTAS reading passage with its questions."""
|
||||||
29
app/services/abc/exam/listening.py
Normal file
29
app/services/abc/exam/listening.py
Normal file
@@ -0,0 +1,29 @@
|
|||||||
|
import queue
|
||||||
|
from abc import ABC, abstractmethod
|
||||||
|
from queue import Queue
|
||||||
|
from typing import Dict, List
|
||||||
|
|
||||||
|
from fastapi import UploadFile
|
||||||
|
|
||||||
|
|
||||||
|
class IListeningService(ABC):
    """Contract for listening-exam generation and audio handling."""

    @abstractmethod
    async def generate_listening_dialog(self, section_id: int, topic: str, difficulty: str):
        """Generate a dialog script for the given listening section."""

    @abstractmethod
    async def get_listening_question(self, section: int, dto):
        """Return the question set for *section*, driven by *dto*."""

    @abstractmethod
    async def generate_mp3(self, dto) -> bytes:
        """Render the dialog described by *dto* to MP3 bytes."""

    @abstractmethod
    async def get_dialog_from_audio(self, upload: UploadFile):
        """Extract a dialog transcript from an uploaded audio file."""

    @abstractmethod
    async def save_listening(self, parts: list[dict], min_timer: int, difficulty: str, listening_id: str) -> Dict:
        """Persist a listening exam built from *parts*."""
|
||||||
17
app/services/abc/exam/reading.py
Normal file
17
app/services/abc/exam/reading.py
Normal file
@@ -0,0 +1,17 @@
|
|||||||
|
from abc import ABC, abstractmethod
|
||||||
|
from fastapi import UploadFile
|
||||||
|
|
||||||
|
|
||||||
|
class IReadingService(ABC):
    """Contract for reading-exam import and generation."""

    @abstractmethod
    async def import_exam(self, exercises: UploadFile, solutions: UploadFile = None):
        """Import a reading exam (and optional solutions) from uploads."""

    @abstractmethod
    async def generate_reading_exercises(self, dto):
        """Generate the reading exercises requested by *dto*."""

    @abstractmethod
    async def generate_reading_passage(self, part: int, topic: str, word_count: int = 800):
        """Generate a reading passage of roughly *word_count* words."""
|
||||||
29
app/services/abc/exam/speaking.py
Normal file
29
app/services/abc/exam/speaking.py
Normal file
@@ -0,0 +1,29 @@
|
|||||||
|
from abc import ABC, abstractmethod
|
||||||
|
from typing import List, Dict, Optional
|
||||||
|
|
||||||
|
|
||||||
|
class ISpeakingService(ABC):
    """Contract for speaking-exam content, grading and video generation."""

    @abstractmethod
    async def get_speaking_part(
        self, part: int, topic: str, difficulty: str, second_topic: Optional[str] = None
    ) -> Dict:
        """Return the question set for one speaking part."""

    @abstractmethod
    async def grade_speaking_task(self, task: int, answers: List[Dict]) -> Dict:
        """Grade the answers given for one speaking task."""

    @abstractmethod
    async def create_videos_and_save_to_db(self, exercises: List[Dict], template: Dict, req_id: str):
        """Render avatar videos for *exercises* and persist them."""

    @abstractmethod
    async def generate_video(
        self, part: int, avatar: str, topic: str, questions: list[str],
        *,
        second_topic: Optional[str] = None,
        prompts: Optional[list[str]] = None,
        suffix: Optional[str] = None,
    ):
        """Generate one avatar video for the given part and questions."""
|
||||||
11
app/services/abc/exam/writing.py
Normal file
11
app/services/abc/exam/writing.py
Normal file
@@ -0,0 +1,11 @@
|
|||||||
|
from abc import ABC, abstractmethod
|
||||||
|
|
||||||
|
class IWritingService(ABC):
    """Contract for writing-exam question generation and grading."""

    @abstractmethod
    async def get_writing_task_general_question(self, task: int, topic: str, difficulty: str):
        """Return a general-training question for writing task *task*."""

    @abstractmethod
    async def grade_writing_task(self, task: int, question: str, answer: str):
        """Grade *answer* against *question* for the given task number."""
|
||||||
13
app/services/abc/third_parties/__init__.py
Normal file
13
app/services/abc/third_parties/__init__.py
Normal file
@@ -0,0 +1,13 @@
|
|||||||
|
"""Abstract interfaces for third-party-backed services (STT, TTS, LLM, ...)."""
from .stt import ISpeechToTextService
from .tts import ITextToSpeechService
from .llm import ILLMService
from .vid_gen import IVideoGeneratorService
from .ai_detector import IAIDetectorService

__all__ = [
    "ISpeechToTextService",
    "ITextToSpeechService",
    "ILLMService",
    "IVideoGeneratorService",
    "IAIDetectorService"
]
|
||||||
13
app/services/abc/third_parties/ai_detector.py
Normal file
13
app/services/abc/third_parties/ai_detector.py
Normal file
@@ -0,0 +1,13 @@
|
|||||||
|
from abc import ABC, abstractmethod
|
||||||
|
from typing import Dict, Optional
|
||||||
|
|
||||||
|
|
||||||
|
class IAIDetectorService(ABC):
    """Contract for AI-generated-text detection."""

    @abstractmethod
    async def run_detection(self, text: str):
        """Run detection on *text* and return the result."""

    @abstractmethod
    def _parse_detection(self, response: Dict) -> Optional[Dict]:
        """Parse a raw detector *response*; the Optional return allows None.

        NOTE(review): an abstract *private* method is unusual — implementers
        must still provide it; confirm it belongs on the public contract.
        """
|
||||||
38
app/services/abc/third_parties/llm.py
Normal file
38
app/services/abc/third_parties/llm.py
Normal file
@@ -0,0 +1,38 @@
|
|||||||
|
from abc import ABC, abstractmethod
|
||||||
|
from typing import List, Optional, TypeVar, Callable
|
||||||
|
|
||||||
|
from openai.types.chat import ChatCompletionMessageParam
|
||||||
|
from pydantic import BaseModel
|
||||||
|
|
||||||
|
# Any pydantic model type produced by pydantic_prediction.
T = TypeVar('T', bound=BaseModel)


class ILLMService(ABC):
    """Contract for chat-completion-style LLM access."""

    @abstractmethod
    async def prediction(
        self,
        model: str,
        messages: List,
        fields_to_check: Optional[List[str]],
        temperature: float,
        check_blacklisted: bool = True,
        token_count: int = -1
    ):
        """Run a completion over *messages* and return the result."""

    @abstractmethod
    async def prediction_override(self, **kwargs):
        """Escape hatch: run a completion with caller-supplied kwargs."""

    @abstractmethod
    async def pydantic_prediction(
        self,
        messages: List[ChatCompletionMessageParam],
        map_to_model: Callable,
        json_scheme: str,
        *,
        model: Optional[str] = None,
        temperature: Optional[float] = None,
        max_retries: int = 3
    ) -> List[T] | T | None:
        """Run a completion and map its JSON output onto pydantic models."""
|
||||||
8
app/services/abc/third_parties/stt.py
Normal file
8
app/services/abc/third_parties/stt.py
Normal file
@@ -0,0 +1,8 @@
|
|||||||
|
from abc import ABC, abstractmethod
|
||||||
|
|
||||||
|
|
||||||
|
class ISpeechToTextService(ABC):
    """Contract for speech-to-text transcription."""

    @abstractmethod
    async def speech_to_text(self, file_path):
        """Transcribe the audio file at *file_path*."""
|
||||||
22
app/services/abc/third_parties/tts.py
Normal file
22
app/services/abc/third_parties/tts.py
Normal file
@@ -0,0 +1,22 @@
|
|||||||
|
from abc import ABC, abstractmethod
|
||||||
|
from typing import Union
|
||||||
|
|
||||||
|
|
||||||
|
class ITextToSpeechService(ABC):
    """Contract for text-to-speech synthesis."""

    @abstractmethod
    async def synthesize_speech(self, text: str, voice: str, engine: str, output_format: str):
        """Synthesize *text* with the given voice, engine and output format."""

    @abstractmethod
    async def text_to_speech(self, dialog) -> bytes:
        """Render a whole *dialog* to audio bytes."""

    @abstractmethod
    async def _conversation_to_speech(self, conversation: list):
        """Render a multi-turn *conversation* to audio."""

    @abstractmethod
    async def _text_to_speech(self, text: str):
        """Render a single utterance to audio."""
|
||||||
|
|
||||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user