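# Builds llama-cpp-python wheels for Python 3.8 on top of CUDA 11.3.1 / cuDNN 8 / Ubuntu 18.04:
# one wheel linked against cuBLAS (GPU) and one CPU-only wheel.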
FROM nvidia/cuda:11.3.1-cudnn8-devel-ubuntu18.04

ARG LLAMA_CPP_VERSION="0.1.50"
ARG CMAKE_VERSION=3.26
ARG CMAKE_VERSION_PATCH=3.26.3
ARG CMAKE_OS=linux
ARG DEBIAN_FRONTEND=noninteractive
ENV TZ=UTC

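# Build prerequisites: compilers, Python headers and libraries needed to build CPython via pyenv,
# BLAS libraries, and gcc-10/g++-10 from the ubuntu-toolchain-r PPA (made the default via update-alternatives).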
RUN apt-get update && \
    apt-get install --no-install-recommends -y \
        curl git vim build-essential software-properties-common python3 python3-pip python3-dev python3-venv \
        libffi-dev libncurses5-dev zlib1g zlib1g-dev libreadline-dev libbz2-dev libsqlite3-dev libssl-dev \
        libblas-dev liblapack-dev libopenblas-dev cmake && \
    add-apt-repository ppa:ubuntu-toolchain-r/test && \
    apt-get update && \
    apt-get install --no-install-recommends -y gcc-10 g++-10 && \
    update-alternatives --install /usr/bin/gcc gcc /usr/bin/gcc-10 100 --slave /usr/bin/g++ g++ /usr/bin/g++-10 --slave /usr/bin/gcov gcov /usr/bin/gcov-10 && \
    rm -rf /var/lib/apt/lists/* && \
    pip3 install scikit-build

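# Install CMake $CMAKE_VERSION_PATCH under /opt/cmake and symlink it into /usr/local/bin,
# where it takes precedence over the distro cmake on PATH.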
RUN curl -L https://cmake.org/files/v$CMAKE_VERSION/cmake-$CMAKE_VERSION_PATCH-$CMAKE_OS-x86_64.sh -o /tmp/cmake-$CMAKE_VERSION_PATCH-$CMAKE_OS-x86_64.sh && \
    mkdir /opt/cmake && \
    sh /tmp/cmake-$CMAKE_VERSION_PATCH-$CMAKE_OS-x86_64.sh --skip-license --prefix=/opt/cmake && \
    ln -s /opt/cmake/bin/cmake /usr/local/bin/cmake

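# Run the build as an unprivileged user that owns the build directory.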
RUN useradd -m -u 1000 appuser

WORKDIR /build
RUN chown appuser:appuser /build
USER appuser

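# pyenv environment for the build user; shims come first on PATH so the pyenv-built Python is used.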
ENV HOME /home/appuser
ENV PYENV_ROOT $HOME/.pyenv
ENV PATH $PYENV_ROOT/shims:$PYENV_ROOT/bin:$PATH

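# Fetch llama-cpp-python at the pinned release tag, place upstream llama.cpp sources (default branch)
# under vendor/llama.cpp, and install pyenv.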
RUN git clone --depth 1 --branch v$LLAMA_CPP_VERSION https://github.com/abetlen/llama-cpp-python.git /build
RUN git clone https://github.com/ggerganov/llama.cpp.git /build/vendor/llama.cpp
RUN curl -fsSL https://pyenv.run | bash

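# Build CPython 3.8.9 with pyenv, then build two wheels from the same source tree:
# one with cuBLAS enabled and one with it disabled, copied into /build/dists/ with -gpu/-cpu markers.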
RUN pyenv install 3.8.9 && \
    pyenv global 3.8.9 && \
    pyenv rehash && \
    pip install --no-cache-dir --upgrade pip==22.3.1 setuptools wheel && \
    pip install --no-cache-dir datasets "huggingface-hub>=0.12.1" "protobuf<4" "click<8.1" "scikit-build" && \
    CMAKE_ARGS="-DLLAMA_CUBLAS=on -DLLAMA_OPENBLAS=off" FORCE_CMAKE=1 python3 setup.py bdist_wheel && \
    mkdir /build/dists/ && \
    cp dist/llama_cpp_python-${LLAMA_CPP_VERSION}-cp38-cp38-linux_x86_64.whl dists/llama_cpp_python-gpu-${LLAMA_CPP_VERSION}-cp38-cp38-linux_x86_64.whl && \
    CMAKE_ARGS="-DLLAMA_CUBLAS=off -DLLAMA_OPENBLAS=off" FORCE_CMAKE=1 python3 setup.py bdist_wheel && \
    cp dist/llama_cpp_python-${LLAMA_CPP_VERSION}-cp38-cp38-linux_x86_64.whl dists/llama_cpp_python-cpu-${LLAMA_CPP_VERSION}-cp38-cp38-linux_x86_64.whl && \
    ls -l /build/dists/