#!/bin/bash

set -e

# Move to the repository root (one level above this script) so the relative
# paths below resolve.
cd "$(dirname "$0")/.." || exit

MODEL="${MODEL:-./models/13B/ggml-model-q4_0.bin}"
PROMPT_TEMPLATE="${PROMPT_TEMPLATE:-./prompts/chat.txt}"
USER_NAME="${USER_NAME:-USER}"
AI_NAME="${AI_NAME:-ChatLLaMa}"
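
# The defaults above can be overridden from the environment; a hypothetical
# invocation (paths and names are examples only, script location assumed):
#   MODEL=./models/7B/ggml-model-q4_0.bin AI_NAME=Bob ./examples/chat-13B.sh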

# Number of CPU threads to use (tune to your machine).
N_THREAD="${N_THREAD:-8}"

# Number of tokens to predict (set high so the chat session can run long).
N_PREDICTS="${N_PREDICTS:-2048}"

# Default sampling/generation options. Override the whole set by exporting
# GEN_OPTIONS, or override individual flags on the command line (see the
# example below).
GEN_OPTIONS="${GEN_OPTIONS:---ctx_size 2048 --temp 0.7 --top_k 40 --top_p 0.5 --repeat_last_n 256 --batch_size 1024 --repeat_penalty 1.17647}"
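
# Any extra arguments are forwarded to llama-cli via "$@" below, so a single
# flag can also be overridden per run; a hypothetical example (script path
# assumed):
#   ./examples/chat-13B.sh --ctx_size 1024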

# Current time and year, substituted into the prompt template below.
DATE_TIME=$(date +%H:%M)
DATE_YEAR=$(date +%Y)

# Write the instantiated prompt to a temporary file.
PROMPT_FILE=$(mktemp -t llamacpp_prompt.XXXXXXX.txt)
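
# Note: the temporary file is not removed when the script exits. If cleanup
# is wanted, a minimal sketch would be:
#   trap 'rm -f "$PROMPT_FILE"' EXIT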

# Fill in the [[...]] placeholders in the prompt template.
sed -e "s/\[\[USER_NAME\]\]/$USER_NAME/g" \
    -e "s/\[\[AI_NAME\]\]/$AI_NAME/g" \
    -e "s/\[\[DATE_TIME\]\]/$DATE_TIME/g" \
    -e "s/\[\[DATE_YEAR\]\]/$DATE_YEAR/g" \
    "$PROMPT_TEMPLATE" > "$PROMPT_FILE"
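
# The template is assumed to contain the [[...]] markers matched above;
# illustrative content only:
#   Transcript of a dialog between [[USER_NAME]] and [[AI_NAME]],
#   taking place at [[DATE_TIME]] in the year [[DATE_YEAR]].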

# GEN_OPTIONS is intentionally left unquoted so it word-splits into flags.
# shellcheck disable=SC2086
./llama-cli $GEN_OPTIONS \
    --model "$MODEL" \
    --threads "$N_THREAD" \
    --n_predict "$N_PREDICTS" \
    --color --interactive \
    --file "$PROMPT_FILE" \
    --reverse-prompt "${USER_NAME}:" \
    --in-prefix ' ' \
    "$@"