vvv-knyazeva committed on
Commit
2c52d39
1 Parent(s): a13d4f2

Update stri.py

Browse files
Files changed (1) hide show
  1. stri.py +8 -8
stri.py CHANGED
@@ -51,7 +51,7 @@ book_embeddings = []
51
  for inputs, attention_masks in zip(input_ids, attention_mask):
52
  with torch.no_grad():
53
  book_embedding = model(inputs.unsqueeze(0), attention_mask=attention_masks.unsqueeze(0))
54
- book_embedding = book_embedding[0][:, 0, :]#.detach().cpu().numpy()
55
  book_embeddings.append(np.squeeze(book_embedding))
56
 
57
  # Определение запроса пользователя
@@ -68,17 +68,17 @@ query_mask = torch.tensor(query_mask, dtype=torch.long)
68
 
69
  with torch.no_grad():
70
  query_embedding = model(query_padded.unsqueeze(0), query_mask.unsqueeze(0))
71
- query_embedding = query_embedding[0][:, 0, :].detach().cpu().numpy()
72
 
73
  # Вычисление косинусного расстояния между эмбеддингом запроса и каждой аннотацией
74
- #cosine_similarities = torch.nn.functional.cosine_similarity(
75
- # query_embedding.squeeze(0),
76
- # torch.stack(book_embeddings)
77
- #)
78
  cosine_similarities = torch.nn.functional.cosine_similarity(
79
- torch.tensor(query_embedding.squeeze(0)),
80
- torch.stack([torch.tensor(embedding) for embedding in book_embeddings])
81
  )
 
 
 
 
82
 
83
  cosine_similarities = cosine_similarities.numpy()
84
 
 
51
  for inputs, attention_masks in zip(input_ids, attention_mask):
52
  with torch.no_grad():
53
  book_embedding = model(inputs.unsqueeze(0), attention_mask=attention_masks.unsqueeze(0))
54
+ book_embedding = book_embedding[0][:, 0, :] #.detach().cpu().numpy()
55
  book_embeddings.append(np.squeeze(book_embedding))
56
 
57
  # Определение запроса пользователя
 
68
 
69
  with torch.no_grad():
70
  query_embedding = model(query_padded.unsqueeze(0), query_mask.unsqueeze(0))
71
+ query_embedding = query_embedding[0][:, 0, :] #.detach().cpu().numpy()
72
 
73
  # Вычисление косинусного расстояния между эмбеддингом запроса и каждой аннотацией
 
 
 
 
74
  cosine_similarities = torch.nn.functional.cosine_similarity(
75
+ query_embedding.squeeze(0),
76
+ torch.stack(book_embeddings)
77
  )
78
+ #cosine_similarities = torch.nn.functional.cosine_similarity(
79
+ # torch.tensor(query_embedding.squeeze(0)),
80
+ # torch.stack([torch.tensor(embedding) for embedding in book_embeddings])
81
+ #)
82
 
83
  cosine_similarities = cosine_similarities.numpy()
84