salihmarangoz committed
Commit 933f49e
1 Parent(s): 5b36a9d

added examples

Files changed (1)
  1. app.py +16 -3
app.py CHANGED
@@ -16,6 +16,15 @@ tokenizer = AutoTokenizer.from_pretrained("google/siglip-base-patch16-256-multil
 num_dimensions = model.vision_model.config.hidden_size # 768
 num_k = 30
 
+text_examples = [
+    "Frog waiting on a rock",
+    "Bird with open mouth",
+    "Bridge and a ship",
+    "Bike for two people",
+    "Biene auf der Blume",
+    "Hesap makinesi"
+]
+
 def preprocess_images(pathname="images/*", index_file="index.faiss"):
     print("Preprocessing images...")
     index = faiss.IndexFlatIP(num_dimensions) # Build the index using Inner Product (IP) similarity.
@@ -109,16 +118,20 @@ if __name__ == "__main__":
         with gr.Column():
             gr.Markdown("This app is powered by [SigLIP](https://huggingface.co/google/siglip-base-patch16-256-multilingual) with multilingual support and [GPR1200 Dataset](https://www.kaggle.com/datasets/mathurinache/gpr1200-dataset) image contents. Enter your query in the text box or upload an image to search for similar images.")
             with gr.Tab("Text-Image Search"):
-                text_input = gr.Textbox(label="Type a word or a sentence", placeholder="a frog waiting on a rock")
+                text_input = gr.Textbox(label="Type a word or a sentence")
                 search_using_text_btn = gr.Button("Search with text", scale=0)
-
+                gr.Examples(
+                    examples = text_examples,
+                    inputs = [text_input]
+                )
+
             with gr.Tab("Image-Image Search"):
                 image_input = gr.Image()
                 search_using_image_btn = gr.Button("Search with image", scale=0)
 
             gallery = gr.Gallery(label="Generated images", show_label=False,
                                  elem_id="gallery", columns=3,
-                                 object_fit="contain", interactive=False, scale=3)
+                                 object_fit="contain", interactive=False, scale=2.75)
 
             search_using_text_btn.click(search_using_text, inputs=text_input, outputs=gallery)
             search_using_image_btn.click(search_using_image, inputs=image_input, outputs=gallery)
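For reference, here is a minimal sketch of the pattern this commit adds: a `gr.Examples` component wired to a textbox, so clicking an example string copies it into the input. The component names and the `echo` callback below are illustrative placeholders, not the app's own `search_using_text` pipeline.

```python
# Minimal, self-contained sketch of gr.Examples feeding a textbox.
# All names here are illustrative; the real app wires the button to its
# SigLIP + FAISS search function instead of this echo placeholder.
import gradio as gr

text_examples = [
    "Frog waiting on a rock",
    "Bird with open mouth",
]

def echo(query):
    # Stand-in for the app's search_using_text(); just returns the query text.
    return query

with gr.Blocks() as demo:
    text_input = gr.Textbox(label="Type a word or a sentence")
    search_btn = gr.Button("Search with text")
    result = gr.Textbox(label="Result")
    # Clicking an example only fills text_input; the search still runs via the button.
    gr.Examples(examples=text_examples, inputs=[text_input])
    search_btn.click(echo, inputs=text_input, outputs=result)

if __name__ == "__main__":
    demo.launch()
```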
 
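The surrounding code (visible in the first hunk's context) builds a `faiss.IndexFlatIP` over 768-dimensional SigLIP embeddings and retrieves `num_k = 30` neighbours per query. A rough sketch of that retrieval pattern, with random vectors standing in for the embeddings the app actually computes:

```python
# Sketch of exact inner-product retrieval with FAISS, mirroring the
# num_dimensions / num_k values seen in the diff. Random vectors stand in
# for the SigLIP image and text embeddings.
import faiss
import numpy as np

num_dimensions = 768   # SigLIP hidden size, per the diff's comment
num_k = 30             # number of neighbours returned per query

index = faiss.IndexFlatIP(num_dimensions)             # exact inner-product index
image_embeddings = np.random.rand(100, num_dimensions).astype("float32")
faiss.normalize_L2(image_embeddings)                   # unit vectors: IP == cosine
index.add(image_embeddings)

query = np.random.rand(1, num_dimensions).astype("float32")
faiss.normalize_L2(query)
scores, ids = index.search(query, num_k)                # top-k scores and row ids
print(ids[0][:5], scores[0][:5])
```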