File size: 866 Bytes
ebafbdd
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
import pickle
import subprocess
from pathlib import Path

import numpy as np
import streamlit as st
from PIL import Image

from fashion_clip.fashion_clip import FashionCLIP

@st.cache_resource
def load_embedding_file():
    """Load precomputed image paths and their embeddings from disk.

    Returns:
        A ``(images, embeddings)`` tuple: the list of image file paths and
        the matching embedding matrix, as stored in the pickle.

    Cached via Streamlit so the pickle is deserialized only once per session.
    """
    # NOTE(review): pickle.load on an untrusted file can execute arbitrary
    # code — acceptable here only because the file is produced locally.
    with open("embeddings_and_paths.pkl", "rb") as fh:
        payload = pickle.load(fh)
    return payload["images_path"], payload["embeddings"]

@st.cache_resource
def _load_model():
    """Load the FashionCLIP model once; Streamlit caches it across reruns."""
    return FashionCLIP('fashion-clip')


fclip = _load_model()

# Clone the demo image dataset only when it is not already present: the
# previous unconditional `git clone` re-ran (and errored) on every Streamlit
# rerun. An argument list with the default shell=False avoids shell parsing.
if not Path("clothing-dataset").exists():
    subprocess.run(
        ["git", "clone", "https://github.com/alexeygrigorev/clothing-dataset"]
    )

query = st.text_input("Enter a description of the clothing item you want to find", "a red dress")

images, image_embeddings = load_embedding_file()

# Embed the query text (batch size 32) and take the single query vector.
text_embedding = fclip.encode_text([query], 32)[0]

# Index of the image whose embedding has the highest dot-product similarity
# with the query embedding.
id_of_matched_object = np.argmax(text_embedding.dot(image_embeddings.T))

image = Image.open(images[id_of_matched_object])

st.image(image)