test-docker / entrypoint.sh
#!/bin/bash
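# Entrypoint: build optional server flags from environment variables, then
# launch api_server.py on 0.0.0.0:7860.
#
# Hypothetical usage sketch (image and model names below are placeholders, not part of this repo):
#   docker run --gpus all -p 7860:7860 \
#     -e HF_MODEL=org/model-name -e HF_TOKEN=hf_xxx \
#     -e GPU_MEMORY_UTILIZATION=0.90 some-image

# Log in to Hugging Face when an access token is provided (needed for gated or private models).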
if [[ -n "${HF_TOKEN}" ]]; then
    echo "The HF_TOKEN environment variable is set, logging in to Hugging Face."
    python3 -c "import huggingface_hub; huggingface_hub.login('${HF_TOKEN}')"
else
    echo "The HF_TOKEN environment variable is not set or is empty, not logging in to Hugging Face."
fi
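# Collect optional server flags; anything in EXTRA_ARGS is passed through verbatim.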
additional_args=${EXTRA_ARGS:-""}
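# Quantization also requires an explicit dtype.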
if [[ -n "${QUANTIZATION}" ]]; then
    if [[ -z "${DTYPE}" ]]; then
        echo "Missing required environment variable DTYPE when QUANTIZATION is set"
        exit 1
    else
        additional_args="${additional_args} -q ${QUANTIZATION} --dtype ${DTYPE}"
    fi
fi
if [[ -n "${GPU_MEMORY_UTILIZATION}" ]]; then
    additional_args="${additional_args} --gpu-memory-utilization ${GPU_MEMORY_UTILIZATION}"
fi
if [[ -n "${MAX_MODEL_LEN}" ]]; then
    additional_args="${additional_args} --max-model-len ${MAX_MODEL_LEN}"
fi
if [[ -n "${TENSOR_PARALLEL_SIZE}" ]]; then
    additional_args="${additional_args} --tensor-parallel-size ${TENSOR_PARALLEL_SIZE}"
fi
if [[ -n "${DOWNLOAD_DIR}" ]]; then
    additional_args="${additional_args} --download-dir ${DOWNLOAD_DIR}"
fi
if [[ -n "${ENFORCE_EAGER}" ]]; then
    additional_args="${additional_args} --enforce-eager"
fi
if [[ -n "${SERVED_MODEL_NAME}" ]]; then
    additional_args="${additional_args} --served-model-name ${SERVED_MODEL_NAME}"
fi
if [[ -n "${SWAP_SPACE}" ]]; then
    additional_args="${additional_args} --swap-space ${SWAP_SPACE}"
fi
if [[ -n "${CHAT_TEMPLATE}" ]]; then
    additional_args="${additional_args} --chat-template ${CHAT_TEMPLATE}"
fi
# PATH_MODEL="/data/models--${HF_MODEL/\//--}"
# if [ -d "$PATH_MODEL" ]; then
# HF_MODEL=$PATH_MODEL
# fi
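# exec replaces the shell process, so the server receives container signals directly.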
exec python3 -u api_server.py \
    --model "${HF_MODEL}" \
    --host 0.0.0.0 \
    --port 7860 \
    ${additional_args}