# syntax=docker/dockerfile:1
# Image: llama.cpp HTTP server preloaded with the gingdev/llama7b-ictu model,
# configured for a Hugging Face Space (non-root user, port 7860).
FROM ghcr.io/ggerganov/llama.cpp:server

# wget is needed only to fetch the model weights below.
# Use apt-get (stable scripting CLI, not `apt`), skip recommended packages,
# and purge the apt lists in the same layer so the cache never lands in the
# image (hadolint DL3027 / DL3015 / DL3009).
RUN apt-get update && \
    apt-get install -y --no-install-recommends wget && \
    rm -rf /var/lib/apt/lists/*

ENV HOME=/home/user

# https://huggingface.co/docs/hub/spaces-sdks-docker-first-demo
# Create a non-root user (UID 1000, as HF Spaces expects), move the server
# binary shipped at /server into the user's home directory, create a models
# folder, and hand ownership of everything to that user.
RUN useradd -m -u 1000 user && \
    mv /server $HOME && \
    mkdir $HOME/models && \
    chown -R user:user $HOME

# Drop root before downloading so the model file is owned by `user`.
USER user
WORKDIR $HOME

# Fetch the quantized model at build time so containers start instantly.
# NOTE(review): the download is unpinned — no checksum or revision; consider
# `ADD --checksum=sha256:...` or verifying the hash here for reproducibility.
RUN wget https://huggingface.co/gingdev/llama7b-ictu/resolve/main/llama7b_q4_k_m.gguf?download=true -O ictu.gguf

# Documentation only (does not publish the port): CMD binds the server to 7860.
EXPOSE 7860

# Exec-form entrypoint: ./server is PID 1 and receives SIGTERM from `docker stop`.
# CMD holds the default arguments so operators can override them at `docker run`.
ENTRYPOINT [ "./server" ]
CMD [ "--host", "0.0.0.0", "--port", "7860", "--model", "ictu.gguf", "-c", "4096", "--chat-template", "chatml", "--embedding" ]