Spaces

mr06cpp committed · Commit 58058b8 · Initial commit

Files changed:
- Dockerfile +15 -0
- README.md +12 -0
- start.sh +109 -0
Dockerfile
ADDED
@@ -0,0 +1,15 @@
+# Building from source is too slow, so let's just customize the existing image
+FROM ghcr.io/open-webui/open-webui:git-143ac08
+
+WORKDIR /app
+
+USER 0:0
+
+# HACK for huggingface.co iframe
+RUN sed -i "s|set_cookie(|set_cookie(samesite='none',secure=True,|g" backend/open_webui/apps/webui/routers/auths.py
+
+RUN pip install "litellm[proxy]==1.44.27" && chown -R 1000:0 /app
+USER 1000:0
+
+COPY ./start.sh /start.sh
+CMD [ "bash", "/start.sh" ]
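The sed patch is purely textual: every `set_cookie(` call in auths.py gains `samesite='none'` and `secure=True` as leading keyword arguments, so the auth cookie is still accepted when Open WebUI runs inside the huggingface.co iframe (cross-site cookies require SameSite=None plus Secure). A minimal sketch of the effect, using a made-up `set_cookie` line rather than the real one from auths.py:

```bash
# Illustrative only; the actual call in backend/open_webui/apps/webui/routers/auths.py may differ.
echo 'response.set_cookie(key="token", value=token, httponly=True)' \
  | sed "s|set_cookie(|set_cookie(samesite='none',secure=True,|g"
# -> response.set_cookie(samesite='none',secure=True,key="token", value=token, httponly=True)
```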
README.md
ADDED
@@ -0,0 +1,12 @@
+---
+title: Open WebUI
+emoji: 🌍
+colorFrom: indigo
+colorTo: purple
+sdk: docker
+pinned: false
+app_port: 8080
+---
+
+A simple, opinionated Open WebUI setup for Hugging Face Spaces.
+It opts to maximize ease-of-use over customizability.
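The front matter is the usual Hugging Face Spaces config block: `sdk: docker` builds the Dockerfile above, and `app_port: 8080` is the port Open WebUI's backend listens on. Runtime configuration comes from Space secrets that `start.sh` (below) reads at boot; a hedged summary of the variables it recognizes, with placeholder values:

```bash
# Hypothetical secret values; on a Space these are set in the Space settings,
# not exported in a shell. Set only the ones you need.
OPENAI_API_KEY="..."          # exposes openai/* models through LiteLLM
ANTHROPIC_API_KEY="..."       # exposes anthropic/* models
PERPLEXITY_API_KEY="..."      # exposes perplexity/* models
BRAVE_SEARCH_API_KEY="..."    # turns on Brave web search for RAG
DATABASE_URL="postgres://..." # optional Postgres persistence for Open WebUI
WEBUI_AUTH="True"             # require login instead of the default single-user mode
LITELLM_MODELS_BASE64="..."   # extra LiteLLM model_list entries, base64-encoded YAML
```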
start.sh
ADDED
@@ -0,0 +1,109 @@
+#!/usr/bin/env bash
+set -euo pipefail
+
+pids=()
+
+generate_litellm_provider() {
+  provider="$1"
+  key_env="$2"
+
+  if [[ -n "${!key_env:-}" ]]; then
+    >&2 echo "[!] Found key ${key_env} for ${provider}"
+    cat <<EOF
+  - model_name: "${provider}/*"
+    litellm_params:
+      model: "${provider}/*"
+      api_key: "os.environ/${key_env}"
+EOF
+  fi
+}
+
+generate_litellm_config() {
+  cat <<EOF
+model_list:
+  - model_name: huggingface/google/gemma-2-2b-it
+    litellm_params:
+      model: huggingface/google/gemma-2-2b-it
+EOF
+  generate_litellm_provider "openai" "OPENAI_API_KEY"
+  generate_litellm_provider "perplexity" "PERPLEXITY_API_KEY"
+  generate_litellm_provider "anthropic" "ANTHROPIC_API_KEY"
+
+  if [[ -n "${LITELLM_MODELS_BASE64:-}" ]]; then
+    echo "${LITELLM_MODELS_BASE64}" | base64 -d
+  fi
+}
+
+start_litellm() {
+  (
+    # KISS: No persistence for LiteLLM
+    unset DATABASE_URL
+
+    litellm \
+      --host "127.0.0.1" \
+      --port "4000" \
+      --config /tmp/litellm_config.yaml
+  ) &
+  pids+=("$!")
+}
+
+start_open_webui() {
+  (
+    # By default, we expect it to be deployed to a private HF Space.
+    # You can enable WEBUI_AUTH as needed
+    if [[ -z "${WEBUI_AUTH:-}" ]]; then
+      >&2 echo "[!] Enabling single user mode"
+      export WEBUI_AUTH="False"
+    fi
+
+    # The less the user needs to configure, the better :)
+    if [[ -n "${OPENAI_API_KEY:-}" ]] && [[ -z "${WEBUI_SECRET_KEY:-}${WEBUI_JWT_SECRET_KEY:-}" ]]; then
+      >&2 echo "[!] Using OpenAI API key as Web UI secret key"
+      export WEBUI_SECRET_KEY="${OPENAI_API_KEY}"
+    fi
+
+    if [[ -z "${ENABLE_RAG_WEB_SEARCH:-}${RAG_WEB_SEARCH_ENGINE:-}" ]]; then
+      if [[ -n "${BRAVE_SEARCH_API_KEY:-}" ]]; then
+        export RAG_WEB_SEARCH_ENGINE="brave"
+        export ENABLE_RAG_WEB_SEARCH="True"
+      fi
+    fi
+
+    if [[ -z "${DATABASE_URL:-}" ]]; then
+      unset DATABASE_URL
+    fi
+
+    export ENABLE_OLLAMA_API="${ENABLE_OLLAMA_API:-False}"
+
+    export OPENAI_API_BASE_URLS="http://localhost:4000"
+    export OPENAI_API_KEY="sk-unused"
+
+    export ENABLE_IMAGE_GENERATION="True"
+    export IMAGES_OPENAI_API_BASE_URL="http://localhost:4000"
+    export IMAGES_OPENAI_API_KEY="sk-unused"
+
+    export PGSSLCERT=/tmp/postgresql.crt
+
+    env
+    /app/backend/start.sh
+  ) &
+  pids+=("$!")
+}
+
+wait_litellm() {
+  while ! curl -s http://localhost:4000 >/dev/null; do
+    >&2 echo "[!] Waiting for LiteLLM..."
+    sleep 1
+  done
+}
+
+generate_litellm_config >/tmp/litellm_config.yaml
+>&2 cat /tmp/litellm_config.yaml
+
+start_litellm
+wait_litellm
+start_open_webui
+
+for pid in "${pids[@]}"; do
+  wait "$pid"
+done
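Process supervision is deliberately simple: LiteLLM and Open WebUI each run in a background subshell, their PIDs are collected in `pids`, and the script then waits on both so the container stays up while they run. The easy-to-miss extension point is `LITELLM_MODELS_BASE64`: whatever it decodes to is appended verbatim to the generated config. A minimal sketch of preparing that value, assuming GNU coreutils `base64`; the `groq` entry and `GROQ_API_KEY` name are only examples:

```bash
# Hypothetical extra models for the LITELLM_MODELS_BASE64 secret.
# The decoded YAML is appended to the model_list in /tmp/litellm_config.yaml,
# so it must use the same indentation as the entries emitted by generate_litellm_provider.
base64 -w0 <<'EOF'
  - model_name: "groq/*"
    litellm_params:
      model: "groq/*"
      api_key: "os.environ/GROQ_API_KEY"
EOF
```

A matching `GROQ_API_KEY` secret would then also need to be set on the Space so LiteLLM can resolve `os.environ/GROQ_API_KEY`.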