Try removing the clip-retrieval dependency
Browse files
- app.py: +3 -2
- requirements.txt: +1 -1
app.py
CHANGED
@@ -272,8 +272,9 @@ device = "cuda:0" if torch.cuda.is_available() else "cpu"
|
|
272 |
|
273 |
from clip_retrieval.load_clip import load_clip, get_tokenizer
|
274 |
# model, preprocess = load_clip(clip_model, use_jit=True, device=device)
|
275 |
-
model, preprocess = load_clip(clip_model, use_jit=True, device=device)
|
276 |
-
tokenizer = get_tokenizer(clip_model)
|
|
|
277 |
|
278 |
clip_retrieval_client = ClipClient(
|
279 |
url=clip_retrieval_service_url,
|
|
|
272 |
|
273 |
from clip_retrieval.load_clip import load_clip, get_tokenizer
|
274 |
# model, preprocess = load_clip(clip_model, use_jit=True, device=device)
|
275 |
+
# model, preprocess = load_clip(clip_model, use_jit=True, device=device)
|
276 |
+
# tokenizer = get_tokenizer(clip_model)
|
277 |
+
model, preprocess, tokenizer = None, None, None
|
278 |
|
279 |
clip_retrieval_client = ClipClient(
|
280 |
url=clip_retrieval_service_url,
|
requirements.txt
CHANGED
@@ -7,4 +7,4 @@ numpy
|
|
7 |
# # ftfy
|
8 |
# gradio
|
9 |
# accelerate
|
10 |
-
clip-retrieval == 2.36.1
|
|
|
7 |
# # ftfy
|
8 |
# gradio
|
9 |
# accelerate
|
10 |
+
# clip-retrieval == 2.36.1
|