diff --git a/.env b/.env index 979e608..0368d65 100644 --- a/.env +++ b/.env @@ -4,3 +4,10 @@ JWT_TEST_TOKEN=eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJzdWIiOiJ0ZXN0In0.Emrs2D3B GOOGLE_APPLICATION_CREDENTIALS=firebase-configs/storied-phalanx-349916.json HEY_GEN_TOKEN=MjY4MDE0MjdjZmNhNDFmYTlhZGRkNmI3MGFlMzYwZDItMTY5NTExNzY3MA== GPT_ZERO_API_KEY=0195b9bb24c5439899f71230809c74af + +FIREBASE_SCRYPT_B64_SIGNER_KEY="qjo/b5U5oNxA8o+PHFMZx/ZfG8ZQ7688zYmwMOcfZvVjOM6aHe4Jf270xgyrVArqLIQwFi7VkFnbysBjueMbVw==" +FIREBASE_SCRYPT_B64_SALT_SEPARATOR="Bw==" +FIREBASE_SCRYPT_ROUNDS=8 +FIREBASE_SCRYPT_MEM_COST=14 +FIREBASE_PROJECT_ID=encoach-staging +FIREBASE_CLI_TOKEN="1//03QsSa_vSDOqoCgYIARAAGAMSNwF-L9IrA0nxAZtglYBbiYqnKB9dayShPlN6xMQVI4PALPGIwr8hSpcyRuaZL0dUH-Qa7pohefo" diff --git a/Dockerfile b/Dockerfile index 5c9b4e8..939b732 100644 --- a/Dockerfile +++ b/Dockerfile @@ -20,9 +20,15 @@ RUN apt update && apt install -y \ texlive-xetex \ pandoc \ librsvg2-bin \ + curl \ && rm -rf /var/lib/apt/lists/* +RUN curl -sL https://deb.nodesource.com/setup_20.x | bash - \ + && apt-get install -y nodejs + +RUN npm install -g firebase-tools + # Install production dependencies. 
@app.route('/batch_users', methods=['POST'])
@jwt_required()  # fix: user-creation endpoint was unauthenticated, unlike sibling routes
def create_users_batch():
    """Create a batch of Firebase/Firestore users from the JSON payload.

    Expects the BatchUsersDTO shape: {"makerID": str, "users": [...]}.
    Returns the service result on success, or the error message with HTTP 500
    (previously the bare string was returned with an implicit 200).
    """
    try:
        return batch_users_service.batch_users(request.get_json())
    except Exception as e:
        app.logger.error(str(e))
        return str(e), 500
class UserDTO(BaseModel):
    """One user row in a batch-import request.

    Mixed naming (snake_case vs camelCase) mirrors the incoming JSON payload;
    do not "fix" the field names — they are the wire format.
    """

    # Generated UID unless the caller supplies one; becomes the Firebase Auth
    # UID and the Firestore `users` document id.
    id: uuid.UUID = Field(default_factory=uuid.uuid4)
    email: str
    name: str
    # Account kind; the service branches on "corporate" and "student".
    # TODO(review): confirm the full set of allowed values.
    type: str
    passport_id: str
    # Pre-computed password hash/salt, imported verbatim via
    # `firebase auth:import --hash-algo=SCRYPT` — never a plaintext password.
    # Presumably base64-encoded; confirm with the producer of the payload.
    passwordHash: str
    passwordSalt: str
    # Optional group to join; matched by name against groups owned by makerID.
    groupName: Optional[str] = None
    # Email of the corporate account this user belongs to (looked up in Firestore).
    corporate: Optional[str] = None
    studentID: Optional[str] = None
    # Stored verbatim as subscriptionExpirationDate; format is not validated here.
    expiryDate: Optional[str] = None
    demographicInformation: Optional[DemographicInfo] = None


class BatchUsersDTO(BaseModel):
    """Payload for POST /batch_users: the creating admin plus the users to import."""

    # UID of the admin creating these users; recorded as the access-code creator.
    makerID: str
    users: list[UserDTO]
class FileHelper:
    # NOTE(review): these two helpers are the additions to
    # modules/helper/file_helper.py; kept adjacent here so the batch-users
    # service is reviewable/testable in isolation.

    @staticmethod
    def remove_file(file_path):
        """Best-effort removal of a single file; never raises.

        A missing path is a silent no-op; directories are left untouched.
        """
        try:
            # os.path.isfile() already implies existence, so the separate
            # os.path.exists() check was redundant.
            if os.path.isfile(file_path):
                os.remove(file_path)
        except Exception as e:
            print(f"An error occurred while trying to remove the file {file_path}: {str(e)}")

    @staticmethod
    def save_upload(file):
        """Persist an uploaded file under ./tmp/<uuid>/uploaded.<ext>.

        Returns (ext, path_id). NOTE: a filename without a dot yields the
        whole name as `ext` — preserved for backward compatibility.
        """
        ext = file.filename.split('.')[-1]
        path_id = str(uuid.uuid4())
        os.makedirs(f'./tmp/{path_id}', exist_ok=True)
        tmp_filename = f'./tmp/{path_id}/uploaded.{ext}'
        file.save(tmp_filename)
        return ext, path_id


class BatchUsers:
    """Bulk-imports users into Firebase Auth (via the firebase CLI) and Firestore.

    Flow: map JSON -> DTO, write an auth:import CSV, shell out to the firebase
    CLI, then create the Firestore user/code/group documents.
    """

    _DEFAULT_DESIRED_LEVELS = {
        "reading": 9,
        "listening": 9,
        "writing": 9,
        "speaking": 9,
    }

    _DEFAULT_LEVELS = {
        "reading": 0,
        "listening": 0,
        "writing": 0,
        "speaking": 0,
    }

    def __init__(self, firestore):
        self._db = firestore
        self._logger = getLogger(__name__)

    def batch_users(self, request_data: Dict):
        """Import a batch of users; returns {"ok": True} or an error string.

        The CSV contains password hashes, so it is always deleted, even when
        the CLI import fails (previously it was leaked on the failure path).
        """
        batch_dto = self._map_to_batch(request_data)

        # fix: the service crashed with FileNotFoundError when ./tmp was absent
        os.makedirs('./tmp', exist_ok=True)
        file_name = f'{uuid.uuid4()}.csv'
        path = f'./tmp/{file_name}'
        try:
            self._generate_firebase_auth_csv(batch_dto, path)

            result = self._upload_users('./tmp', file_name)
            if result.returncode != 0:
                error_msg = (
                    "Couldn't upload users. Failed to run command firebase auth import -> "
                    f"```cmd {result.stderr}```"
                )
                self._logger.error(error_msg)
                return error_msg

            self._init_users(batch_dto)
            return {"ok": True}
        finally:
            FileHelper.remove_file(path)

    @staticmethod
    def _map_to_batch(request_data: Dict) -> "BatchUsersDTO":
        """Validate the raw JSON into a BatchUsersDTO (raises on bad input)."""
        users = [UserDTO(**user) for user in request_data["users"]]
        return BatchUsersDTO(makerID=request_data["makerID"], users=users)

    @staticmethod
    def _generate_firebase_auth_csv(batch_dto: "BatchUsersDTO", path: str):
        """Write users as a headerless CSV in the `firebase auth:import` format.

        Column order is mandated by https://firebase.google.com/docs/cli/auth#file_format
        """
        columns = [
            'UID', 'Email', 'Email Verified', 'Password Hash', 'Password Salt', 'Name',
            'Photo URL', 'Google ID', 'Google Email', 'Google Display Name', 'Google Photo URL',
            'Facebook ID', 'Facebook Email', 'Facebook Display Name', 'Facebook Photo URL',
            'Twitter ID', 'Twitter Email', 'Twitter Display Name', 'Twitter Photo URL',
            'GitHub ID', 'GitHub Email', 'GitHub Display Name', 'GitHub Photo URL',
            'User Creation Time', 'Last Sign-In Time', 'Phone Number'
        ]

        current_time = int(time.time() * 1000)  # epoch millis, as firebase expects

        users_data = []
        for user in batch_dto.users:
            # All unused provider columns stay empty strings; only the five
            # populated fields differ per user.
            row = dict.fromkeys(columns, '')
            row.update({
                'UID': str(user.id),
                'Email': user.email,
                'Email Verified': False,
                'Password Hash': user.passwordHash,
                'Password Salt': user.passwordSalt,
                'User Creation Time': current_time,
            })
            users_data.append(row)

        pd.DataFrame(users_data, columns=columns).to_csv(path, index=False, header=False)

    @staticmethod
    def _upload_users(directory: str, file_name: str):
        """Run `firebase auth:import` on the CSV inside `directory`.

        Returns the CompletedProcess; the caller inspects returncode/stderr.
        """
        # fix: build an argv list and run with shell=False — the previous
        # f-string + shell=True broke on values containing spaces or shell
        # metacharacters (the base64 signer key contains '/', '+', '=') and
        # was a command-injection hazard via environment values.
        command = [
            'firebase', 'auth:import', file_name,
            '--hash-algo=SCRYPT',
            f'--hash-key={os.getenv("FIREBASE_SCRYPT_B64_SIGNER_KEY")}',
            f'--salt-separator={os.getenv("FIREBASE_SCRYPT_B64_SALT_SEPARATOR")}',
            f'--rounds={os.getenv("FIREBASE_SCRYPT_ROUNDS")}',
            f'--mem-cost={os.getenv("FIREBASE_SCRYPT_MEM_COST")}',
            f'--project={os.getenv("FIREBASE_PROJECT_ID")}',
            f'--token={os.getenv("FIREBASE_CLI_TOKEN")}',
        ]
        # NOTE(review): --token on the command line is visible in the process
        # list; prefer passing FIREBASE_TOKEN via the subprocess environment.
        # NOTE(review): with shell=False a missing `firebase` binary raises
        # FileNotFoundError (caught by the route) instead of returncode 127.
        return subprocess.run(command, cwd=directory, capture_output=True, text=True)

    def _init_users(self, batch_users: "BatchUsersDTO"):
        """Create the Firestore documents (user, code, groups) for each user."""
        maker_id = batch_users.makerID
        for user in batch_users.users:
            self._insert_new_user(user)
            code = self._create_code(user, maker_id)

            if user.type == "corporate":
                self._set_corporate_default_groups(user)

            if user.corporate:
                self._assign_corporate_to_user(user, code)

            if user.groupName and len(user.groupName.strip()) > 0:
                self._assign_user_to_group_by_name(user, maker_id)

    def _insert_new_user(self, user: "UserDTO"):
        """Write the `users/<uid>` document with sane defaults."""
        new_user = {
            # Secrets and import-only fields never land in Firestore.
            **user.dict(exclude={
                'id', 'passport_id', 'groupName', 'expiryDate',
                'corporate', 'passwordHash', 'passwordSalt'
            }),
            'bio': "",
            'focus': "academic",
            'status': "active",
            'desiredLevels': self._DEFAULT_DESIRED_LEVELS,
            'profilePicture': "/defaultAvatar.png",
            'levels': self._DEFAULT_LEVELS,
            'isFirstLogin': False,
            'isVerified': True,
            # NOTE(review): naive local time — confirm whether UTC is expected.
            'registrationDate': datetime.now(),
            'subscriptionExpirationDate': user.expiryDate
        }
        self._db.collection('users').document(str(user.id)).set(new_user)

    def _create_code(self, user: "UserDTO", maker_id: str) -> str:
        """Create a 6-char access code document for the user; returns the code."""
        code = shortuuid.ShortUUID().random(length=6)
        self._db.collection('codes').document(code).set({
            'code': code,
            'creator': maker_id,
            'expiryDate': user.expiryDate,
            'type': user.type,
            'creationDate': datetime.now(),
            'userId': str(user.id),
            'email': user.email,
            'name': user.name,
            'passport_id': user.passport_id
        })
        return code

    def _set_corporate_default_groups(self, user: "UserDTO"):
        """Create the three locked default groups owned by a corporate account."""
        user_id = str(user.id)
        for group_name in ("Teachers", "Students", "Corporate"):
            group = {
                'admin': user_id,
                'id': str(uuid.uuid4()),
                'name': group_name,
                'participants': [],
                'disableEditing': True,
            }
            self._db.collection('groups').document(group['id']).set(group)

    def _assign_corporate_to_user(self, user: "UserDTO", code: str):
        """Re-parent the code to the corporate account and join its default group."""
        user_id = str(user.id)
        corporate_users = self._db.collection('users').where(
            filter=FieldFilter('email', '==', user.corporate)
        ).limit(1).get()
        if len(corporate_users) > 0:
            corporate_user = corporate_users[0]
            self._db.collection('codes').document(code).set({'creator': corporate_user.id}, merge=True)

            group_type = "Students" if user.type == "student" else "Teachers"

            groups = self._db.collection('groups').where(
                filter=FieldFilter('admin', '==', corporate_user.id)
            ).where(
                filter=FieldFilter('name', '==', group_type)
            ).limit(1).get()

            if len(groups) > 0:
                group = groups[0]
                # NOTE(review): read-modify-write is race-prone under concurrent
                # imports; firestore.ArrayUnion would be atomic (behavior change,
                # left as-is deliberately).
                participants = group.get('participants')
                if user_id not in participants:
                    participants.append(user_id)
                    group.reference.update({'participants': participants})

    def _assign_user_to_group_by_name(self, user: "UserDTO", maker_id: str):
        """Add the user to the maker's group of that name, creating it if needed."""
        user_id = str(user.id)
        group_name = user.groupName.strip()

        groups = self._db.collection('groups').where(
            filter=FieldFilter('admin', '==', maker_id)
        ).where(
            filter=FieldFilter('name', '==', group_name)
        ).limit(1).get()

        if len(groups) == 0:
            new_group = {
                'id': str(uuid.uuid4()),
                'admin': maker_id,
                'name': group_name,
                'participants': [user_id],
                'disableEditing': False,
            }
            self._db.collection('groups').document(new_group['id']).set(new_group)
        else:
            group = groups[0]
            # NOTE(review): same read-modify-write race as above.
            participants = group.get('participants')
            if user_id not in participants:
                participants.append(user_id)
                group.reference.update({'participants': participants})
str(uuid.uuid4()) - os.makedirs(f'./tmp/{path_id}', exist_ok=True) - - tmp_filename = f'./tmp/{path_id}/uploaded.{ext}' - file.save(tmp_filename) - return ext, path_id - def _level_json_schema(self): return { "parts": [ @@ -392,4 +382,4 @@ class UploadLevelService: exercise["words"][i]["id"] = counter exercise["solutions"][i]["id"] = counter counter += 1 - return response \ No newline at end of file + return response diff --git a/requirements.txt b/requirements.txt index 8afd38d..aa784cd 100644 Binary files a/requirements.txt and b/requirements.txt differ