import pickle
import gradio as gr
from datasets import load_dataset
from transformers import AutoModel, AutoFeatureExtractor
seed = 42
# Load the precomputed nearest-neighbor index once at startup.
with open("index_768_cosine.pickle", "rb") as handle:
    index = pickle.load(handle)
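
# The pickled index is assumed to be an approximate-nearest-neighbor structure built over
# the dataset's 768-d embeddings with a cosine metric, exposing a .query(embedding, k)
# method that returns (indices, distances); see the offline build sketch further below.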
# Load model for computing embeddings.
feature_extractor = AutoFeatureExtractor.from_pretrained("sasha/autotrain-butterfly-similarity-2490576840")
model = AutoModel.from_pretrained("sasha/autotrain-butterfly-similarity-2490576840")
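# AutoModel loads the vision backbone without any task head, so its pooled output
# serves as a generic image embedding (768-dimensional, matching the index file name).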
# Candidate images.
dataset = load_dataset("sasha/butterflies_10k_names_multiple")
ds = dataset["train"]
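
# Hedged sketch of how an index like index_768_cosine.pickle could be built offline from
# this dataset. The use of pynndescent here is an assumption (any library whose index
# pickles and exposes .query(embedding, k) -> (indices, distances) would work); it is
# kept commented out so the app itself is unchanged.
#
#     import numpy as np
#     import torch
#     from pynndescent import NNDescent
#
#     def embed(batch):
#         inputs = feature_extractor(batch["image"], return_tensors="pt")
#         with torch.no_grad():
#             return {"embedding": model(**inputs).pooler_output.numpy()}
#
#     embeddings = np.array(ds.map(embed, batched=True, batch_size=32)["embedding"])
#     offline_index = NNDescent(embeddings, metric="cosine")
#     offline_index.prepare()
#     with open("index_768_cosine.pickle", "wb") as handle:
#         pickle.dump(offline_index, handle)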
def query(image, top_k=4):
    # Embed the uploaded image and pool it to a single vector.
    inputs = feature_extractor(image, return_tensors="pt")
    model_output = model(**inputs)
    embedding = model_output.pooler_output.detach()
    # Look up the top_k nearest neighbors: the index returns (indices, distances).
    results = index.query(embedding, k=top_k)
    inx = results[0][0].tolist()
    distances = results[1][0].tolist()
    # Fetch the matching images and their names from the dataset.
    images = ds.select(inx)["image"]
    captions = ds.select(inx)["name"]
    images_with_captions = [(i, c) for i, c in zip(images, captions)]
    # Turn cosine distances into similarity scores for the label widget.
    labels_with_probs = {name: 1 - dist for name, dist in zip(captions, distances)}
    return images_with_captions, labels_with_probs
with gr.Blocks() as demo:
    gr.Markdown("# Find my Butterfly 🦋")
    gr.Markdown("## Use this Space to find your butterfly, based on the [iNaturalist butterfly dataset](https://huggingface.co/datasets/huggan/inat_butterflies_top10k)!")
    # Left: upload an image and trigger the search; right: gallery of nearest matches
    # and their similarity scores.
    # Note: Gallery().style(...) and Image(shape=...) follow the Gradio 3.x API.
    with gr.Row():
        with gr.Column(min_width=900):
            inputs = gr.Image(shape=(800, 1600))
            btn = gr.Button("Find my butterfly!")
        with gr.Column():
            outputs = gr.Gallery().style(grid=[2], height="auto")
            labels = gr.Label()
    gr.Markdown("### Image Examples")
    gr.Examples(
        examples=["elton.jpg", "ken.jpg", "gaga.jpg", "taylor.jpg"],
        inputs=inputs,
        outputs=[outputs, labels],
        fn=query,
        cache_examples=True,
    )
    btn.click(query, inputs, [outputs, labels])

demo.launch()