# syntax=docker/dockerfile:1

# Comments are provided throughout this file to help you get started.
# If you need more help, visit the Dockerfile reference guide at
# https://docs.docker.com/go/dockerfile-reference/

# Build-time override: docker build --build-arg PYTHON_VERSION=3.11 .
ARG PYTHON_VERSION=3.10.14
# "AS" uppercased to match instruction-casing convention (build-check FromAsCasing).
FROM python:${PYTHON_VERSION} AS base

# Prevents Python from writing pyc files.
ENV PYTHONDONTWRITEBYTECODE=1

# Keeps Python from buffering stdout and stderr to avoid situations where
# the application crashes without emitting any logs due to buffering.
ENV PYTHONUNBUFFERED=1

WORKDIR /app

# Create a non-privileged user that the app will run under.
# See https://docs.docker.com/go/dockerfile-user-best-practices/
# NOTE(review): left disabled because the ollama installer below requires
# root; revisit once ollama runs in its own container/stage.
# ARG UID=10001
# RUN adduser \
#     --disabled-password \
#     --gecos "" \
#     --home "/nonexistent" \
#     --shell "/sbin/nologin" \
#     --no-create-home \
#     --uid "${UID}" \
#     appuser

# Download dependencies as a separate step to take advantage of Docker's
# layer caching: this layer is rebuilt only when requirements.txt changes.
# - the cache mount keeps pip's download cache between builds (it lives on
#   the build host, never in the image)
# - the bind mount avoids copying requirements.txt into a layer
RUN --mount=type=cache,target=/root/.cache/pip \
    --mount=type=bind,source=requirements.txt,target=requirements.txt \
    python -m pip install -r requirements.txt

# Switch to the non-privileged user to run the application.
# USER appuser

# Copy the source code into the container (specific paths, not `COPY . .`,
# to keep cache invalidations narrow).
COPY ./data ./data
COPY ./backend ./backend
COPY ./documentation_bot ./documentation_bot
COPY ./frontend ./frontend
COPY ./structured_query ./structured_query
COPY ./ollama ./ollama
COPY ./llm_service ./llm_service
COPY ./start_docker_local.sh ./start_docker_local.sh
COPY ./start_local.sh ./start_local.sh
COPY ./start_training.sh ./start_training.sh
COPY ./stop_docker.sh ./stop_docker.sh

# Use bash with pipefail so a failed download below aborts the build instead
# of piping an error page into sh (hadolint DL4006).
SHELL ["/bin/bash", "-o", "pipefail", "-c"]

# Install the ollama runtime.
# NOTE(review): curl | sh is unpinned and unverified; consider pinning an
# ollama release and verifying its checksum.
RUN curl -fsSL https://ollama.com/install.sh | sh

# Pre-download the llama3 model weights at build time. "ollama pull" only
# downloads the model; "run" would start an interactive session that reads
# stdin (unavailable during build). The server is backgrounded just long
# enough for the pull and dies when this RUN layer ends.
# NOTE(review): this bakes a multi-GB model into the image; a runtime volume
# for /root/.ollama would keep the image small — confirm with the team.
RUN ollama serve & sleep 5 && ollama pull llama3

# Document the ports the services listen on (EXPOSE does not publish them):
# 8000/8081/8083 backend services, 8501 frontend, 11434 ollama API.
EXPOSE 8000
EXPOSE 8081
EXPOSE 8083
EXPOSE 8501
EXPOSE 11434

# Run the application. Invoke via bash so the script does not need the
# executable bit preserved in the build context; exec (JSON-array) form is
# kept so docker stop signals reach the process tree.
CMD ["bash", "./start_docker_local.sh"]
0 commit comments