# HuggingFace Spaces deployment — runs on an A100 GPU.
ARG UBUNTU_VERSION=22.04
ARG CUDA_VERSION=12.3.1
ARG BASE_CUDA_DEV_CONTAINER=nvidia/cuda:${CUDA_VERSION}-devel-ubuntu${UBUNTU_VERSION}
ARG BASE_CUDA_RUN_CONTAINER=nvidia/cuda:${CUDA_VERSION}-runtime-ubuntu${UBUNTU_VERSION}

# ---- Build stage: compile llama.cpp's HTTP server with CUDA (cuBLAS) support ----
FROM ${BASE_CUDA_DEV_CONTAINER} AS build

# CUDA architectures to compile for; "all" builds every supported arch.
ARG CUDA_DOCKER_ARCH=all

# update + install in one layer (avoids stale-cache bug); list cleanup in the
# same layer so the apt metadata never lands in the image.
RUN apt-get update --fix-missing && \
    apt-get install -y --no-install-recommends \
        build-essential \
        cmake \
        gcc \
        git \
    && rm -rf /var/lib/apt/lists/*

WORKDIR /build
# NOTE(review): unpinned clone of master — the build is not reproducible, and
# newer llama.cpp revisions renamed LLAMA_CUBLAS to GGML_CUDA, which would make
# the -DLLAMA_CUBLAS=ON flag below fail. TODO: pin a release tag, e.g.
# `--branch <tag>`, known to accept LLAMA_CUBLAS. --depth 1 keeps the clone small.
RUN git clone --depth 1 https://github.com/ggerganov/llama.cpp.git
WORKDIR /build/llama.cpp

ENV CUDA_DOCKER_ARCH=${CUDA_DOCKER_ARCH}
ENV LLAMA_CUBLAS=1

# Out-of-tree CMake build; `cmake -B` replaces the manual mkdir/cd dance and
# produces the same output path: /build/llama.cpp/build/bin/server.
RUN cmake -B build -DLLAMA_CUBLAS=ON && \
    cmake --build build --config Release
# ---- Runtime stage: minimal CUDA runtime image with only the server binary ----
FROM ${BASE_CUDA_RUN_CONTAINER} AS runtime

# wget is kept for run.sh — presumably it fetches model weights at container
# start. NOTE(review): confirm against run.sh before removing it.
RUN apt-get update --fix-missing && \
    apt-get install -y --no-install-recommends wget && \
    rm -rf /var/lib/apt/lists/*

WORKDIR /app

# Copy the server executable and its web UI assets from the build stage.
COPY --from=build /build/llama.cpp/build/bin/server /app
COPY --from=build /build/llama.cpp/examples/server/public /app/public
# --chmod marks the script executable at copy time (BuildKit), replacing the
# extra `RUN chmod` layer the original used.
COPY --chmod=755 ./run.sh /app/run.sh

# Documentation only — the port run.sh is expected to serve on; it must still
# be published at `docker run -p`.
EXPOSE 7867

# Exec form: run.sh becomes PID 1 and receives SIGTERM from `docker stop`
# (shell form would wrap it in `/bin/sh -c` and swallow signals).
CMD ["./run.sh"]