Update app.py
app.py CHANGED
@@ -78,31 +78,50 @@ def segment_clothing(img, clothes=["Hat", "Upper-clothes", "Skirt", "Pants", "Dr
 
     return img_with_alpha.convert("RGB"), final_mask, detected_categories
 
-def find_similar_images(query_embedding, collection, top_k=5):
-    # Query ChromaDB for the most similar images.
-    results = collection.query(
-        query_embeddings=query_embedding.reshape(1, -1),  # convert to a 2D array
-        n_results=top_k,
-        include=['metadatas', 'embeddings']  # modified to include the embeddings
-    )
-
-    # Extract the metadata and embeddings.
-    top_metadatas = results['metadatas'][0]
-    top_embeddings = results['embeddings'][0]  # fetch the embeddings
-
-    # Compute Euclidean distances
-    distances = euclidean_distances(query_embedding.reshape(1, -1), top_embeddings).flatten()
-
-    structured_results = []
-    for metadata, distance in zip(top_metadatas, distances):
-        structured_results.append({
-            'info': metadata,
-            'similarity': 1 / (1 + distance)  # distance-based similarity (smaller distance = higher similarity)
-        })
-
-    return structured_results
-
+#def find_similar_images(query_embedding, collection, top_k=5):
+#    # Query ChromaDB for the most similar images.
+#    results = collection.query(
+#        query_embeddings=query_embedding.reshape(1, -1),  # convert to a 2D array
+#        n_results=top_k,
+#        include=['metadatas', 'embeddings']  # modified to include the embeddings
+#    )
+#
+#    # Extract the metadata and embeddings.
+#    top_metadatas = results['metadatas'][0]
+#    top_embeddings = results['embeddings'][0]  # fetch the embeddings
+#
+#    # Compute Euclidean distances
+#    distances = euclidean_distances(query_embedding.reshape(1, -1), top_embeddings).flatten()
+#
+#    structured_results = []
+#    for metadata, distance in zip(top_metadatas, distances):
+#        structured_results.append({
+#            'info': metadata,
+#            'similarity': 1 / (1 + distance)  # distance-based similarity (smaller distance = higher similarity)
+#        })
+
+#    return structured_results
+
+def find_similar_images(query_embedding, collection, top_k=5):
+    query_embedding = query_embedding.reshape(1, -1)  # Reshape to 2D array for ChromaDB
+    results = collection.query(
+        query_embeddings=query_embedding,
+        n_results=top_k,
+        include=['metadatas', 'distances']
+    )
+
+    top_metadatas = results['metadatas'][0]
+    top_distances = results['distances'][0]
+
+    structured_results = []
+    for metadata, distance in zip(top_metadatas, top_distances):
+        structured_results.append({
+            'info': metadata,
+            'similarity': 1 - distance
+        })
+
+    return structured_results
 
 # Initialize session state
 if 'step' not in st.session_state:
     st.session_state.step = 'input'
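
For reference, a minimal usage sketch of the updated find_similar_images as it appears after this commit. The in-memory chromadb client, the "clothing" collection name, the hnsw:space="cosine" setting, and the 512-dimensional random vectors standing in for real image embeddings are illustrative assumptions, not part of app.py; the score 1 - distance lines up with the distance ChromaDB reports when the collection uses cosine space.

# Usage sketch (assumptions: in-memory client, "clothing" collection, cosine space,
# 512-dim stand-in embeddings; find_similar_images is the new function shown above).
import chromadb
import numpy as np

client = chromadb.Client()  # the app's real client/persistence setup may differ
collection = client.get_or_create_collection(
    name="clothing",
    metadata={"hnsw:space": "cosine"},  # so 1 - distance behaves like cosine similarity
)

# Index one example item so the query has something to return.
item_embedding = np.random.rand(512).astype(np.float32)
collection.add(
    ids=["item-001"],
    embeddings=[item_embedding.tolist()],
    metadatas=[{"name": "sample top", "category": "Upper-clothes"}],
)

# Query with a stand-in for the segmented-garment embedding.
query_embedding = np.random.rand(512).astype(np.float32)
for match in find_similar_images(query_embedding, collection, top_k=5):
    print(match['info'], round(match['similarity'], 3))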