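"""Gradio Space for image-similarity search: embed an uploaded photo with a vision
model and retrieve the most visually similar images from a candidate dataset via a
pre-built cosine-similarity index."""
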
import pickle
import gradio as gr
from datasets import load_dataset
from transformers import AutoModel, AutoFeatureExtractor
# Load the pre-built similarity index; this only runs once, when the script starts.
with open("slugs_index_1024_cosine.pickle", "rb") as handle:
    index = pickle.load(handle)
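# Note: the unpickled index is assumed to expose a query(embeddings, k) method that
# returns (neighbour_indices, distances); any approximate-nearest-neighbour index built
# over the candidate images' embeddings with a cosine metric would fit this interface.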
# Load the feature extractor and model used to compute image embeddings.
feature_extractor = AutoFeatureExtractor.from_pretrained("sasha/autotrain-butterfly-similarity-2490576840")
model = AutoModel.from_pretrained("sasha/autotrain-butterfly-similarity-2490576840")
# Candidate images and their names, used to populate the results gallery.
dataset = load_dataset("sasha/butterflies_10k_names_multiple")
ds = dataset["train"]
def query(image, top_k=4):
    # Embed the query image.
    inputs = feature_extractor(image, return_tensors="pt")
    model_output = model(**inputs)
    embedding = model_output.pooler_output.detach()
    # Retrieve the top_k nearest neighbours: the index returns (indices, distances).
    results = index.query(embedding, k=top_k)
    inx = results[0][0].tolist()
    logits = results[1][0].tolist()
    # Look up the matching images and their names for the gallery.
    images = ds.select(inx)["image"]
    captions = ds.select(inx)["name"]
    images_with_captions = [(i, c) for i, c in zip(images, captions)]
    # Convert cosine distances to similarity scores (1 - distance) for the label output.
    labels_with_probs = dict(zip(captions, logits))
    labels_with_probs = {k: 1 - v for k, v in labels_with_probs.items()}
    return images_with_captions, labels_with_probs
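
# Build the Gradio UI: image input and search button on the left; a gallery of the
# closest matches and their similarity scores on the right.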
with gr.Blocks() as demo:
    gr.Markdown("# Find my Sea Slug 🐌")
    gr.Markdown("## Use this Space to find your sea slug, based on the [Nudibranchs of the Sunshine Coast Australia dataset](https://huggingface.co/datasets/sasha/australian_sea_slugs)!")
    with gr.Row():
        with gr.Column(min_width=900):
            inputs = gr.Image(shape=(800, 1600))
            btn = gr.Button("Find my sea slug 🐌!")
        with gr.Column():
            outputs = gr.Gallery().style(grid=[2], height="auto")
            labels = gr.Label()
    gr.Markdown("### Image Examples")
    gr.Examples(
        examples=["elton.jpg", "ken.jpg", "gaga.jpg", "taylor.jpg"],
        inputs=inputs,
        outputs=[outputs, labels],
        fn=query,
        cache_examples=True,
    )
    btn.click(query, inputs, [outputs, labels])
demo.launch()