Async release
This commit is contained in:
23
Dockerfile
23
Dockerfile
@@ -1,6 +1,10 @@
|
||||
# Build stage: export poetry-managed dependencies to a plain requirements.txt.
# Keyword AS uppercased to match instruction casing (BuildKit FromAsCasing check).
FROM python:3.11-slim AS requirements-stage
|
||||
WORKDIR /tmp
|
||||
# --no-cache-dir keeps the pip wheel cache out of the image layer (hadolint DL3042).
# NOTE(review): poetry is unpinned — consider `poetry==<x.y.z>` for reproducible builds.
RUN pip install --no-cache-dir poetry
|
||||
COPY pyproject.toml ./poetry.lock* /tmp/
|
||||
RUN poetry export -f requirements.txt --output requirements.txt --without-hashes
|
||||
|
||||
|
||||
# Use the official lightweight Python image.
|
||||
# https://hub.docker.com/_/python
|
||||
FROM python:3.11-slim
|
||||
|
||||
# Allow statements and log messages to immediately appear in the logs
|
||||
@@ -9,18 +13,25 @@ ENV PYTHONUNBUFFERED True
|
||||
# Copy local code to the container image.
|
||||
ENV APP_HOME /app
|
||||
WORKDIR $APP_HOME
|
||||
|
||||
COPY . ./
|
||||
|
||||
COPY --from=requirements-stage /tmp/requirements.txt /app/requirements.txt
|
||||
|
||||
# ffmpeg is required by openai-whisper for audio decoding.
# apt-get (not apt, whose CLI is not script-stable — hadolint DL3027),
# --no-install-recommends to keep the image small (DL3015),
# and the apt lists are removed in the same layer so they never persist (DL3009).
RUN apt-get update \
    && apt-get install -y --no-install-recommends ffmpeg \
    && rm -rf /var/lib/apt/lists/*
|
||||
|
||||
# Install production dependencies.
|
||||
RUN pip install --no-cache-dir -r requirements.txt
|
||||
# --no-cache-dir avoids baking the pip download cache into the layer (DL3042).
# NOTE(review): openai-whisper is unpinned — pin a version for reproducible builds.
RUN pip install --no-cache-dir openai-whisper
|
||||
|
||||
EXPOSE 5000
|
||||
# openai-whisper model is not compatible with the newer 2.0.0 numpy release
|
||||
# BUG FIX: the constraint must be quoted — an unquoted `<2` is shell input
# redirection, so /bin/sh tried to read stdin from a file named `2` and the
# "<2" bound never reached pip (numpy 2.x could still be installed).
RUN pip install --no-cache-dir --upgrade "numpy<2"
|
||||
|
||||
RUN pip install --no-cache-dir -r /app/requirements.txt
|
||||
|
||||
EXPOSE 8000
|
||||
|
||||
# Run the web service on container startup. Here we use the gunicorn
|
||||
# webserver, with one worker process and 8 threads.
|
||||
# For environments with multiple CPU cores, increase the number of workers
|
||||
# to be equal to the cores available.
|
||||
# Timeout is set to 0 to disable the timeouts of the workers to allow Cloud Run to handle instance scaling.
|
||||
CMD exec gunicorn --bind 0.0.0.0:5000 --workers 1 --threads 8 --timeout 0 app:app
|
||||
# BUG FIX: the previous flags were gunicorn's, not uvicorn's — uvicorn has no
# --bind/--threads/--timeout options and would exit immediately on startup.
# uvicorn takes the app first, then --host/--port/--workers.
# `exec` keeps uvicorn as PID 1 so it receives SIGTERM from `docker stop`.
CMD exec uvicorn app.server:app --host 0.0.0.0 --port 8000 --workers 1
|
||||
|
||||
Reference in New Issue
Block a user