Update app.py
app.py CHANGED
@@ -58,7 +58,7 @@ def get_image_embedding(image):
     with torch.no_grad():
         image_features = clip_model.encode_image(image_tensor)
         image_features /= image_features.norm(dim=-1, keepdim=True)
-    return image_features.cpu().numpy()
+    return image_features.cpu().numpy().flatten()
 
 def segment_clothing(img, clothes=["Hat", "Upper-clothes", "Skirt", "Pants", "Dress", "Belt", "Left-shoe", "Right-shoe", "Scarf"]):
     # Segment image
@@ -88,23 +88,21 @@ def segment_clothing(img, clothes=["Hat", "Upper-clothes", "Skirt", "Pants", "Dr
     return img_with_alpha.convert("RGB"), final_mask, detected_categories  # Return detected categories
 
 def find_similar_images(query_embedding, collection, top_k=5):
-    #
+    query_embedding = query_embedding.reshape(1, -1)  # Reshape to 2D array for ChromaDB
     results = collection.query(
-        query_embeddings=
+        query_embeddings=query_embedding,
         n_results=top_k,
-        include=['metadatas', 'distances']
+        include=['metadatas', 'distances']
     )
 
-
-
-    top_distances = results['distances'][0]  # each similarity (the closer the distance, the more similar)
+    top_metadatas = results['metadatas'][0]
+    top_distances = results['distances'][0]
 
-    # Structure the results
     structured_results = []
     for metadata, distance in zip(top_metadatas, top_distances):
         structured_results.append({
             'info': metadata,
-            'similarity': 1 - distance
+            'similarity': 1 - distance
         })
 
     return structured_results
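
For context, a minimal sketch (not part of the commit) of how the two changes fit together: get_image_embedding now returns a flat 1-D vector, and find_similar_images reshapes it to the 2-D (1, dim) layout that ChromaDB's query_embeddings parameter expects. The collection name, embedding dimension, and cosine distance setting below are assumptions for illustration; with cosine distance, 1 - distance is a cosine similarity, which is what the 'similarity' field reports.

import numpy as np
import chromadb

client = chromadb.Client()
collection = client.create_collection(
    name="clothes",                     # hypothetical name, not from the commit
    metadata={"hnsw:space": "cosine"},  # assumed; makes 1 - distance a cosine similarity
)

# Index a few stand-in items so query() has something to return.
dim = 512  # CLIP ViT-B/32 embedding size (assumed)
collection.add(
    ids=[f"item-{i}" for i in range(3)],
    embeddings=[np.random.rand(dim).astype(np.float32).tolist() for _ in range(3)],
    metadatas=[{"category": "Upper-clothes"}] * 3,
)

# get_image_embedding() now ends with .flatten(): encode_image() returns
# shape (1, dim), so the function hands back a 1-D vector of shape (dim,).
query_embedding = np.random.rand(dim).astype(np.float32)

# find_similar_images() reshapes that 1-D vector to (1, dim), one row per
# query, which is the layout query_embeddings expects.
results = collection.query(
    query_embeddings=query_embedding.reshape(1, -1).tolist(),
    n_results=3,
    include=['metadatas', 'distances'],
)
for metadata, distance in zip(results['metadatas'][0], results['distances'][0]):
    print({'info': metadata, 'similarity': 1 - distance})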