sohojoe committed
Commit: f41d645
Parent: c71f2a1

Revert "Revert "Update app.py""


This reverts commit c71f2a1e3e930524cc93900f415175e4c4e7eb83.

Files changed (1)
  1. app.py +1 -2
app.py CHANGED
@@ -9,8 +9,6 @@ import math
  # from transformers import CLIPTextModel, CLIPTokenizer
  import os
 
- from clip_retrieval.clip_client import ClipClient, Modality
-
 
  # clip_model_id = "openai/clip-vit-large-patch14-336"
  # clip_retrieval_indice_name, clip_model_id ="laion5B-L-14", "/laion/CLIP-ViT-L-14-laion2B-s32B-b82K"
@@ -271,6 +269,7 @@ def on_example_image_click_set_image(input_image, image_url):
  device = "cuda:0" if torch.cuda.is_available() else "cpu"
 
  # from clip_retrieval.load_clip import load_clip, get_tokenizer
+ # from clip_retrieval.clip_client import ClipClient, Modality
  # model, preprocess = load_clip(clip_model, use_jit=True, device=device)
  # tokenizer = get_tokenizer(clip_model)
  # clip_retrieval_client = ClipClient(
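
The net effect of the diff is that the top-level "from clip_retrieval.clip_client import ClipClient, Modality" is removed and survives only as a commented-out line next to the rest of the commented-out clip-retrieval setup. For context, the sketch below shows how such a client is typically constructed and queried, assuming the public knn.laion.ai backend and the laion5B-L-14 index already named in app.py; the service URL, num_images, and the query text are illustrative assumptions, not values taken from this commit.

# Minimal sketch (not part of this commit): constructing and querying the
# clip-retrieval client that app.py keeps commented out. The service URL,
# num_images, and the query text are assumptions for illustration.
from clip_retrieval.clip_client import ClipClient, Modality

clip_retrieval_client = ClipClient(
    url="https://knn.laion.ai/knn-service",  # assumed public LAION knn endpoint
    indice_name="laion5B-L-14",              # index name referenced in app.py
    modality=Modality.IMAGE,                 # return image results
    num_images=10,                           # number of neighbours per query
)

# Each result is a dict with keys such as "url", "caption", and "similarity".
results = clip_retrieval_client.query(text="an astronaut riding a horse")
for result in results[:3]:
    print(result["similarity"], result["caption"], result["url"])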