FROM python:3.9

WORKDIR /app

# Install system dependencies: git and build-essential to build llama.cpp, wget to fetch the model
RUN apt-get update && apt-get install -y \
    git \
    build-essential \
    wget \
    && rm -rf /var/lib/apt/lists/*

# Clone llama.cpp
RUN git clone https://github.com/ggerganov/llama.cpp.git /app/llama.cpp

# Download the quantized StableLM 2 Zephyr 1.6B model into llama.cpp's models directory
RUN cd /app/llama.cpp/models && \
    wget -O stablelm-2-zephyr-1_6b-Q4_0.gguf "https://huggingface.co/stabilityai/stablelm-2-zephyr-1_6b/resolve/main/stablelm-2-zephyr-1_6b-Q4_0.gguf?download=true"

# Build llama.cpp
RUN cd /app/llama.cpp && \
    make -j
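
# Note: neither the llama.cpp checkout nor the model download above is pinned to a
# specific revision, so this image is not fully reproducible; pinning a release tag
# (e.g. git clone --branch <tag>) is one way to keep the build stable.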

# Create a virtual environment and put it on PATH
RUN python -m venv /opt/venv
ENV PATH="/opt/venv/bin:$PATH"

# Install Python dependencies
COPY ./requirements.txt /app/requirements.txt
RUN pip install --no-cache-dir --upgrade -r /app/requirements.txt
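
# requirements.txt is assumed to list the app's Python dependencies (e.g. gradio,
# given the GRADIO_* settings below); copying it before the rest of the source
# lets Docker cache the pip install layer until the dependencies change.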

# Set up a new user named "user" with user ID 1000
RUN useradd -m -u 1000 user

# Switch to the "user" user
USER user

# Set home to the user's home directory and configure Python and Gradio defaults
ENV HOME=/home/user \
    PATH=/home/user/.local/bin:$PATH \
    PYTHONPATH=$HOME/app \
    PYTHONUNBUFFERED=1 \
    GRADIO_ALLOW_FLAGGING=never \
    GRADIO_NUM_PORTS=1 \
    GRADIO_SERVER_NAME=0.0.0.0 \
    GRADIO_THEME=huggingface \
    SYSTEM=spaces

# Set the working directory to the user's home directory
WORKDIR $HOME/app

# Copy the current directory contents into the container at $HOME/app, setting the owner to the user
COPY --chown=user . $HOME/app

CMD ["python", "app.py"]