resolve gated repo issue
Files changed:
- .gitignore  +2 -0
- app.py  +10 -6
.gitignore
ADDED
@@ -0,0 +1,2 @@
+# Ignore this .env file
+.env
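The new .gitignore entry keeps a local .env file, which would hold HF_TOKEN during development, out of the repository; on the Space itself the token is expected to come from the Space's secret settings. A minimal local sketch, assuming the python-dotenv package (not part of this commit) and an illustrative .env file:

# Hypothetical local setup, not part of the commit: load HF_TOKEN from the git-ignored .env file.
import os
from dotenv import load_dotenv  # assumes python-dotenv is installed

load_dotenv()  # copies KEY=VALUE pairs from .env into os.environ
hf_token = os.getenv("HF_TOKEN")
print("token found:", hf_token is not None)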
app.py
CHANGED
@@ -7,19 +7,23 @@ import os
 
 device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
 
-
-
+hf_token = os.getenv('HF_TOKEN')
+try:
+    model = pipeline("image-to-text", model="google/paligemma-3b-mix-448", use_auth_token=hf_token)
+except Exception as e:
+    st.error(f"Error loading image-to-text model: {e}")
+    st.stop()
 
 try:
-    model_colpali = ColPali.from_pretrained("vidore/colpali-v1.2", torch_dtype=torch.bfloat16).to(device)
-    processor_colpali = ColPaliProcessor.from_pretrained("google/paligemma-3b-mix-448")
+    model_colpali = ColPali.from_pretrained("vidore/colpali-v1.2", torch_dtype=torch.bfloat16, use_auth_token=hf_token).to(device)
+    processor_colpali = ColPaliProcessor.from_pretrained("google/paligemma-3b-mix-448", use_auth_token=hf_token)
 except Exception as e:
     st.error(f"Error loading ColPali model or processor: {e}")
     st.stop()
 
 try:
-    model_qwen = Qwen2VLForConditionalGeneration.from_pretrained("Qwen/Qwen2-VL-7B-Instruct").to(device)
-    processor_qwen = AutoProcessor.from_pretrained("Qwen/Qwen2-VL-7B-Instruct")
+    model_qwen = Qwen2VLForConditionalGeneration.from_pretrained("Qwen/Qwen2-VL-7B-Instruct", use_auth_token=hf_token).to(device)
+    processor_qwen = AutoProcessor.from_pretrained("Qwen/Qwen2-VL-7B-Instruct", use_auth_token=hf_token)
 except Exception as e:
     st.error(f"Error loading Qwen model or processor: {e}")
     st.stop()
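With HF_TOKEN passed to every loader, the gated checkpoints can be downloaded: the PaliGemma checkpoint is license-gated on the Hub, and vidore/colpali-v1.2 builds on it, so anonymous downloads fail, which is presumably the gated-repo error this commit resolves. An equivalent sketch, assuming a recent transformers/huggingface_hub release where use_auth_token is deprecated in favor of token (variable names illustrative):

# Alternative to passing use_auth_token on every call (assumption: recent transformers/huggingface_hub).
import os
from huggingface_hub import login
from transformers import pipeline

login(token=os.getenv("HF_TOKEN"))  # registers the token for all subsequent Hub downloads
captioner = pipeline("image-to-text", model="google/paligemma-3b-mix-448")

Logging in once keeps the loaders uncluttered; the per-call use_auth_token=hf_token approach in the diff keeps the dependency on the token explicit at each call site. Both satisfy the gated-repo check.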