Spaces:
Sleeping
Sleeping
first test
Browse files- app.py +31 -0
- embeddings_and_paths.pkl +3 -0
- requirements.txt +10 -0
app.py
ADDED
@@ -0,0 +1,31 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
from fashion_clip.fashion_clip import FashionCLIP
|
2 |
+
import pickle
|
3 |
+
import subprocess
|
4 |
+
import streamlit as st
|
5 |
+
import numpy as np
|
6 |
+
from PIL import Image
|
7 |
+
|
8 |
+
@st.cache_resource
def load_embedding_file():
    """Load precomputed image paths and their FashionCLIP embeddings.

    Cached by Streamlit (`st.cache_resource`) so the pickle is
    deserialized only once per server process, not on every rerun.

    Returns:
        tuple: (image_paths, embeddings) taken from the "images_path"
        and "embeddings" keys of the pickled payload.
    """
    # NOTE(review): pickle.load is only safe on trusted input — this file
    # ships alongside the app, so that assumption holds here.
    with open("embeddings_and_paths.pkl", "rb") as fh:
        payload = pickle.load(fh)
    return payload["images_path"], payload["embeddings"]
|
16 |
+
|
17 |
+
# ---------------------------------------------------------------------------
# Streamlit app body: free-text search over a clothing-image dataset using
# FashionCLIP. The query text is embedded, ranked against precomputed image
# embeddings by dot product, and the best-matching image is displayed.
# ---------------------------------------------------------------------------
import os

fclip = FashionCLIP('fashion-clip')

# Streamlit re-executes this whole script on every widget interaction, so
# guard the clone instead of re-running it each time (git clone fails once
# the directory exists). Use the argument-list form with the default
# shell=False to avoid shell-injection / quoting pitfalls.
if not os.path.isdir("clothing-dataset"):
    subprocess.run(
        ["git", "clone", "https://github.com/alexeygrigorev/clothing-dataset"],
        check=False,  # best-effort: embeddings may reference already-present files
    )

query = st.text_input("Enter a description of the clothing item you want to find", "a red dress")

images, image_embeddings = load_embedding_file()

# Embed the single query (batch size 32) and take the one result vector.
text_embedding = fclip.encode_text([query], 32)[0]

# Dot-product similarity against every image embedding; argmax gives the
# index of the best match. Assumes embeddings are row vectors — TODO confirm
# they are L2-normalized if true cosine ranking is intended.
id_of_matched_object = np.argmax(text_embedding.dot(image_embeddings.T))

image = Image.open(images[id_of_matched_object])

st.image(image)
|
embeddings_and_paths.pkl
ADDED
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
1 |
+
version https://git-lfs.github.com/spec/v1
|
2 |
+
oid sha256:540ff45010ac98c5fe88a7f3891c1bb78fc88c93deb40c0e88c773c8925b04b9
|
3 |
+
size 12174196
|
requirements.txt
ADDED
@@ -0,0 +1,10 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
torch
|
2 |
+
transformers
|
3 |
+
pandas==1.4.1
|
4 |
+
numpy
|
5 |
+
Pillow
|
6 |
+
streamlit==1.21.0
|
7 |
+
st_clickable_images
|
8 |
+
plotly
|
9 |
+
datetime
|
10 |
+
fashion-clip
|