Update New_file.txt

New_file.txt  +21 -24  CHANGED
@@ -49,8 +49,6 @@ similarities = cosine_similarity(ideal_embeddings, candidate_embeddings)
 print(similarities)


-## SWIN code
-
 import torch
 from transformers import SwinTransformer, SwinTransformerImageProcessor
 import torchvision.transforms as transforms
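A note on the import line this hunk keeps: as far as I can tell, the Hugging Face transformers package does not export classes named SwinTransformer or SwinTransformerImageProcessor, so that import would fail as written. Below is a minimal sketch of imports and model loading using classes that do exist in transformers; the checkpoint name is an assumption chosen purely for illustration and is not part of this commit.

# Sketch, not part of the commit: load a Swin backbone with classes that
# transformers actually provides. The checkpoint name is an assumption.
import torch
from transformers import AutoImageProcessor, SwinModel

checkpoint = "microsoft/swin-tiny-patch4-window7-224"  # assumed checkpoint
processor = AutoImageProcessor.from_pretrained(checkpoint)
model = SwinModel.from_pretrained(checkpoint)
model.eval()  # inference only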
@@ -78,36 +76,35 @@ def preprocess_image(image_path):
 ideal_image_paths = ["ideal_image1.jpg", "ideal_image2.jpg", "ideal_image3.jpg"] # Replace with your ideal image file paths
 candidate_image_paths = ["candidate_image1.jpg", "candidate_image2.jpg", "candidate_image3.jpg"] # Replace with your candidate image file paths

-# Calculate
-
-for image_path in ideal_image_paths:
-    inputs = preprocess_image(image_path)
-    with torch.no_grad():
-        output = model(**inputs)
-    embedding = output['pixel_values'][0].cpu().numpy()
-    ideal_embeddings.append(embedding)
+# Calculate cosine similarities between ideal and candidate images
+similarities = []

-
-
-
-    inputs = preprocess_image(image_path)
+for ideal_path in ideal_image_paths:
+    ideal_embedding = None
+    inputs_ideal = preprocess_image(ideal_path)
     with torch.no_grad():
-
-
-
-
-
-
+        output_ideal = model(**inputs_ideal)
+    ideal_embedding = output_ideal['pixel_values'][0].cpu().numpy()
+
+    for candidate_path in candidate_image_paths:
+        candidate_embedding = None
+        inputs_candidate = preprocess_image(candidate_path)
+        with torch.no_grad():
+            output_candidate = model(**inputs_candidate)
+        candidate_embedding = output_candidate['pixel_values'][0].cpu().numpy()
+
+        # Calculate cosine similarity between ideal and candidate embeddings
+        similarity = cosine_similarity([ideal_embedding], [candidate_embedding])[0][0]
+        similarities.append((ideal_path, candidate_path, similarity))

 # Set a similarity threshold (e.g., 0.7)
 threshold = 0.7

 # Find similar image pairs based on the threshold
 similar_pairs = []
-for
-
-
-    similar_pairs.append((ideal_image_paths[i], candidate_image_paths[j]))
+for ideal_path, candidate_path, similarity in similarities:
+    if similarity > threshold:
+        similar_pairs.append((ideal_path, candidate_path))

 # Print similar image pairs
 for pair in similar_pairs:
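One more observation about the added code: it reads the "embedding" from output['pixel_values'], but pixel_values is the tensor the image processor produces as the model's input; if model is a standard transformers Swin model, its output does not contain that key, so the lookup would fail. The sketch below is a hedged guess at the end state this file seems to be working toward, using SwinModel's pooler_output as the per-image embedding and a single cosine_similarity call over the stacked embeddings. The checkpoint name, the embed helper, and the pooler_output choice are assumptions on my part, not part of the commit.

# Hedged sketch, not part of the commit. Assumed: SwinModel / AutoImageProcessor,
# the microsoft/swin-tiny-patch4-window7-224 checkpoint, and pooler_output as the
# per-image embedding.
import numpy as np
import torch
from PIL import Image
from sklearn.metrics.pairwise import cosine_similarity
from transformers import AutoImageProcessor, SwinModel

checkpoint = "microsoft/swin-tiny-patch4-window7-224"  # assumed checkpoint
processor = AutoImageProcessor.from_pretrained(checkpoint)
model = SwinModel.from_pretrained(checkpoint)
model.eval()

def embed(image_path):
    # Load one image, preprocess it, and return a pooled Swin embedding as a 1-D array.
    image = Image.open(image_path).convert("RGB")
    inputs = processor(images=image, return_tensors="pt")
    with torch.no_grad():
        outputs = model(**inputs)
    return outputs.pooler_output[0].cpu().numpy()

ideal_image_paths = ["ideal_image1.jpg", "ideal_image2.jpg", "ideal_image3.jpg"]
candidate_image_paths = ["candidate_image1.jpg", "candidate_image2.jpg", "candidate_image3.jpg"]

ideal_embeddings = np.stack([embed(p) for p in ideal_image_paths])
candidate_embeddings = np.stack([embed(p) for p in candidate_image_paths])

# One call gives the full ideal-by-candidate similarity matrix,
# replacing the nested per-pair loop in the committed version.
similarities = cosine_similarity(ideal_embeddings, candidate_embeddings)
print(similarities)

# Keep pairs whose similarity clears the threshold (0.7, as in the file).
threshold = 0.7
similar_pairs = [
    (ideal_image_paths[i], candidate_image_paths[j])
    for i in range(similarities.shape[0])
    for j in range(similarities.shape[1])
    if similarities[i][j] > threshold
]

for pair in similar_pairs:
    print(pair)

With pooled embeddings stacked into arrays of shape (n_images, hidden_size), the single cosine_similarity call is equivalent to the nested per-pair loop but shorter and easier to threshold.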