import os
import pickle
import subprocess

import numpy as np
import streamlit as st
from PIL import Image

from fashion_clip.fashion_clip import FashionCLIP
st.sidebar.write("# FashionCLIP Resources")
st.sidebar.write("We have several resources related to FashionCLIP.")
st.sidebar.write("## Documentation")
st.sidebar.write("* πŸ“š [Blog Post](https://towardsdatascience.com/teaching-clip-some-fashion-3005ac3fdcc3)")
st.sidebar.write("* πŸ“š [Paper](https://www.nature.com/articles/s41598-022-23052-9)")
st.sidebar.write("## Code")
st.sidebar.write("* πŸ“š [Repo](https://github.com/patrickjohncyh/fashion-clip)")
st.sidebar.write("* πŸ“š [Colab](https://colab.research.google.com/drive/1Z1hAxBnWjF76bEi9KQ6CMBBEmI_FVDrW#scrollTo=FzUQGwS1lhGS)")
st.sidebar.write("* πŸ“š [HuggingFace Weights](https://huggingface.co/patrickjohncyh/fashion-clip)")
st.write("# FashionCLIP. A Foundation Model for Fashion.")
st.write("[![Youtube Video](https://img.shields.io/badge/youtube-video-red)](https://www.youtube.com/watch?v=uqRSc-KSA1Y) [![HuggingFace Model](https://img.shields.io/badge/HF%20Model-Weights-yellow)](https://huggingface.co/patrickjohncyh/fashion-clip) [![Open In Colab](https://colab.research.google.com/assets/colab-badge.svg)](https://colab.research.google.com/drive/1Z1hAxBnWjF76bEi9KQ6CMBBEmI_FVDrW?usp=sharing) [![Medium Blog Post](https://raw.githubusercontent.com/aleen42/badges/master/src/medium.svg)](https://towardsdatascience.com/teaching-clip-some-fashion-3005ac3fdcc3) [![Open in Streamlit](https://static.streamlit.io/badges/streamlit_badge_black_white.svg)](https://huggingface.co/spaces/vinid/fashion-clip-app)")
st.write("This web app uses FashionCLIP, our new foundation model for fashion, to find clothing items based on a query of the item you want to find.")
st.write("The model is going to find the most similar item to your query, given a list of 5000 items that have been released by Alexey Grigorev [here](https://github.com/alexeygrigorev/clothing-dataset).")
st.write("Note that some queries might not return anything useful. This could be both due to model's limitation or to the fact that the item you are looking for is missing from the collection.")
st.write("You can find more about FashionCLIP on the [repo](https://github.com/patrickjohncyh/fashion-clip) or on our [paper](https://www.nature.com/articles/s41598-022-23052-9)")
@st.cache_resource
def load_embedding_file():
    """Load the precomputed image embeddings and the paths of the images they belong to."""
    with open("embeddings_and_paths.pkl", "rb") as filino:
        data = pickle.load(filino)
    images = data["images_path"]
    embeddings = data["embeddings"]
    return images, embeddings
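# Note (an assumption inferred from the keys accessed above): "images_path" is
# expected to be a list of image file paths and "embeddings" a NumPy array of
# shape (n_items, embedding_dim); the dot-product ranking below relies on this.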
# Load the pretrained FashionCLIP model.
fclip = FashionCLIP('fashion-clip')
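# A possible refinement, not part of the original app: the model load could
# also be cached so Streamlit does not re-instantiate it on every rerun, e.g.
#
#   @st.cache_resource
#   def load_model():
#       return FashionCLIP('fashion-clip')
#   fclip = load_model()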
# Fetch the clothing dataset on first run.
if not os.path.exists("clothing-dataset"):
    subprocess.run(["git", "clone", "https://github.com/alexeygrigorev/clothing-dataset"], check=True)
st.write("## Simple FashionCLIP search engine")
query = st.text_input("Enter a description of the clothing item you want to find", "a red dress")
images, image_embeddings = load_embedding_file()
# Embed the query (batch size 32) and pick the catalog image whose embedding
# has the highest dot-product similarity with the text embedding.
text_embedding = fclip.encode_text([query], 32)[0]
id_of_matched_object = np.argmax(text_embedding.dot(image_embeddings.T))
image = Image.open(images[id_of_matched_object])
st.image(image)
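# A sketch, not part of the original app: to show the top-5 matches instead of
# a single best match, the similarities could be ranked, e.g.
#
#   top_k_ids = np.argsort(-text_embedding.dot(image_embeddings.T))[:5]
#   for idx in top_k_ids:
#       st.image(Image.open(images[idx]))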
st.write("If you use FashionCLIP in your work, please cite our paper:")
st.write("""
```
@Article{Chia2022,
title="Contrastive language and vision learning of general fashion concepts",
author="Chia, Patrick John
and Attanasio, Giuseppe
and Bianchi, Federico
and Terragni, Silvia
and Magalh{\~a}es, Ana Rita
and Goncalves, Diogo
and Greco, Ciro
and Tagliabue, Jacopo",
journal="Scientific Reports",
year="2022",
month="Nov",
day="08",
volume="12",
number="1",
pages="18958",
issn="2045-2322",
doi="10.1038/s41598-022-23052-9",
url="https://doi.org/10.1038/s41598-022-23052-9"
```
}""")