FROM --platform=linux/amd64 nvcr.io/nvidia/cuda:12.1.0-devel-ubuntu22.04 AS base
# Build-time dependencies: Python tooling plus git and ninja for the source builds below
RUN apt-get update && \
    apt-get install -y python3-pip python3-packaging \
    git ninja-build && \
    pip3 install -U pip
# Tweak this list to reduce build time
# https://developer.nvidia.com/cuda-gpus
ENV TORCH_CUDA_ARCH_LIST="7.0;7.2;7.5;8.0;8.6;8.9;9.0"
RUN pip3 install "torch==2.1.1"
# Optional: MegaBlocks for MoE support, built from source (left commented out here)
# RUN pip3 install "git+https://github.com/stanford-futuredata/megablocks.git"
RUN pip3 install vllm
RUN pip3 install openai
RUN pip3 install "xformers==0.0.23" "transformers==4.36.0" "fschat[model_worker]==0.2.34"
# Build NVIDIA Apex from source. This build is slow, but NVIDIA does not provide binaries;
# increase MAX_JOBS as needed to parallelize the compilation.
# The sed line removes Apex's strict CUDA-version check so it builds against the container's CUDA/torch combination.
RUN git clone https://github.com/NVIDIA/apex && \
    cd apex && git checkout 2386a912164b0c5cfcd8be7a2b890fbac5607c82 && \
    sed -i '/check_cuda_torch_binary_vs_bare_metal(CUDA_HOME)/d' setup.py && \
    python3 setup.py install --cpp_ext --cuda_ext
# Set up a new user named "user" with user ID 1000
RUN useradd -m -u 1000 user
# Switch to the "user" user
USER user
# Set home to the user's home directory
ENV HOME=/home/user \
    PATH=/home/user/.local/bin:$PATH
# Set the working directory to the user's home directory
WORKDIR $HOME/app
COPY --chown=user . $HOME/app
COPY --chown=user entrypoint.sh $HOME/app/
# List the copied files at build time (debugging aid), then make the entrypoint executable
RUN ls -la $HOME/app/
RUN chmod +x $HOME/app/entrypoint.sh
ENTRYPOINT ["/home/user/app/entrypoint.sh"]
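
# Note: entrypoint.sh is referenced above but is not part of this file. Below is only a
# minimal sketch of what such an entrypoint might look like, assuming the image is meant to
# start vLLM's OpenAI-compatible API server; MODEL_NAME, PORT, and the default model are
# illustrative assumptions, not taken from the repository.
#
#   #!/usr/bin/env bash
#   # Hypothetical entrypoint.sh sketch -- the real script in the repository may differ.
#   set -euo pipefail
#
#   # MODEL_NAME and PORT are assumed environment variables, not defined in the Dockerfile above.
#   MODEL_NAME="${MODEL_NAME:-mistralai/Mistral-7B-Instruct-v0.2}"
#   PORT="${PORT:-8000}"
#
#   # Launch vLLM's OpenAI-compatible server so the container exposes a /v1 completions API.
#   exec python3 -m vllm.entrypoints.openai.api_server \
#       --model "$MODEL_NAME" \
#       --host 0.0.0.0 \
#       --port "$PORT"
#
# With an entrypoint along these lines, the image could be built with
# "docker build -t test-docker ." and started with "docker run --gpus all -p 8000:8000 test-docker",
# after which any OpenAI-compatible client can be pointed at http://localhost:8000/v1.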