---
base_model: microsoft/deberta-v3-small
datasets:
- jinaai/negation-dataset-v2
- tals/vitaminc
- allenai/scitail
- allenai/sciq
- allenai/qasc
- sentence-transformers/msmarco-msmarco-distilbert-base-v3
- sentence-transformers/natural-questions
- sentence-transformers/trivia-qa
- sentence-transformers/gooaq
- google-research-datasets/paws
language:
- en
library_name: sentence-transformers
metrics:
- pearson_cosine
- spearman_cosine
- pearson_manhattan
- spearman_manhattan
- pearson_euclidean
- spearman_euclidean
- pearson_dot
- spearman_dot
- pearson_max
- spearman_max
- cosine_accuracy
- dot_accuracy
- manhattan_accuracy
- euclidean_accuracy
- max_accuracy
- cosine_accuracy_threshold
- cosine_f1
- cosine_f1_threshold
- cosine_precision
- cosine_recall
- cosine_ap
- dot_accuracy_threshold
- dot_f1
- dot_f1_threshold
- dot_precision
- dot_recall
- dot_ap
- manhattan_accuracy_threshold
- manhattan_f1
- manhattan_f1_threshold
- manhattan_precision
- manhattan_recall
- manhattan_ap
- euclidean_accuracy_threshold
- euclidean_f1
- euclidean_f1_threshold
- euclidean_precision
- euclidean_recall
- euclidean_ap
- max_accuracy_threshold
- max_f1
- max_f1_threshold
- max_precision
- max_recall
- max_ap
pipeline_tag: sentence-similarity
tags:
- sentence-transformers
- sentence-similarity
- feature-extraction
- generated_from_trainer
- dataset_size:226010
- loss:CachedGISTEmbedLoss
widget:
- source_sentence: what is the common lifespan of a star
  sentences:
  - Mites can leave bites that look like they came from bed bugs (see these pictures
    of bed bug bites), but not all mites are the same, so let me quickly explain.
    In fact, there are almost 46,000 species of mites, but only a few bite humans!
    They are the Northern Fowl Mite, Tropical Rat Mite, and Itch or Scabies Mite.
  - Cost of Cardiac Catheterization Procedures Any type of cardiac care in the United
    States is growing increasingly pricey. A cardiac catheterization procedure, depending
    on location, may range in price between $2,400 and $4,000 in the United States.
  - "Lifespans for main sequence stars have a vast range. Whilst our Sun will spend\
    \ 10 billion years on the main sequence, a high-mass, ten solar-mass (10 M Sun)\
    \ star will only last 20 million years (2.0Ã\x97 10 7 years) on the main sequence.A\
    \ star with a only half the mass of Sun can spend 80 billion years on the main\
    \ sequence.tars are composed almost entirely of hydrogen and helium. A star such\
    \ as our Sun is about 73% hydrogen by mass and 25% helium. If determined by number\
    \ of nuclei then it is 92% hydrogen and 7.8% helium. The remaining 2% by mass\
    \ or 0.2% by number is all the heavier elements."
- source_sentence: More than 169 countries had reported over 212,000 COVID-19 cases
    before March 19 , 2020 .
  sentences:
  - As of 23 March , more than 341,000 cases of COVID-19 have been reported in 192
    countries and territories , resulting in more than 14,700 deaths and 99,000 recoveries
    .
  - As of 21 March , more than 278,000 cases of COVID-19 have been reported in over
    186 countries and territories , resulting in more than 11,500 deaths and 92,000
    recoveries.  virus seems to mostly spread between people via respiratory droplets
    .
  - As of 18 March 2020 , more than 212,000 cases of COVID-19 have been reported in
    at least 170 countries and territories , with major outbreaks in China , Iran
    and the European Union .
- source_sentence: 'The memory walk saw participants gather at Bents Park in South
    Shields on Saturday and travel 7km (4.3miles) along a coastal route.

    The Alzheimer''s Society said it was the biggest event of its kind it had staged
    in the north-east of England.

    Organisers said the number of participants almost doubled that of last year''s
    event.

    About 35,000 people in the region have dementia, according to the charity.'
  sentences:
  - More than 4,500 people have taken part in a charity event raising funds for the
    fight against Alzheimer's disease.
  - Gareth Southgate should be appointed England manager "as soon as possible" and
    be given the same contract as predecessor Sam Allardyce, says former Three Lions
    defender Danny Mills.
  - The owners of a Gwynedd skip hire business have been jailed for illegally storing
    waste.
- source_sentence: Electrical energy can be converted into kinetic energy and heat
    energy by an electric motor.
  sentences:
  - Solution is the term for a homogeneous mixture of two or more substances.
  - Solution is the term for a homogeneous mixture of two or more substances.
  - Electric motors transform electrical energy into kinetic energy.
- source_sentence: who did ben assault in home and away
  sentences:
  - List of Home and Away characters (2017) Ben and Maggie learn that Ziggy has been
    dating Brody in secret and they disapprove of the relationship. Ziggy leaves the
    house and Ben tells her not to come back. He apologises to her the next day, but
    does not accept her relationship with Brody, so Ziggy refuses to come home. Brody
    later breaks up with her. Ben begins making surf boards to sell at the pier. Ben
    finds Coco convulsing in the garden and he and Maggie learn she has bulimia. Ziggy
    later leaves home. Days later, Ben sees her with Brody, who is attempting to bring
    her home, and punches him in the face. Olivia Fraser Richards (Raechelle Banno)
    tells Sergeant Phillip McCarthy (Nicholas Cassim) and Ben is arrested. McCarthy
    and Kat Chapman (Pia Miller) learns he has a criminal record for assaulting his
    brother. Ben insults Kat, which leads to him being charged. Maggie secures a loan
    to get him out on bail. Maggie's mother, Diana (Sarah Chadwick) came to Summer
    Bay to visit the family and Diana told Ben and Maggie that she is the one who
    bailed Ben out of jail.
  - 'Stone (unit) The name "stone" derives from the use of stones for weights, a practice
    that dates back into antiquity. The Biblical law against the carrying of "diverse
    weights, a large and a small"[7] is more literally translated as "you shall not
    carry a stone and a stone (אבן ואבן), a large and a small". There was no standardised
    "stone" in the ancient Jewish world,[8] but in Roman times stone weights were
    crafted to multiples of the Roman pound.[9] Such weights varied in quality: the
    Yale Medical Library holds 10 and 50-pound examples of polished serpentine,[10]
    while a 40-pound example at the Eschborn Museum (see right) is made of sandstone.[11]'
  - Bad Things (Machine Gun Kelly and Camila Cabello song) "Bad Things" is a song
    by American rapper Machine Gun Kelly and Cuban-American singer Camila Cabello.
    The song was released on October 14, 2016 and was produced by The Futuristics.
    Its music video was directed by Hannah Lux Davis and premiered on December 1,
    2016. The song features an interpolation of Fastball's 1999 single "Out of My
    Head". The single peaked at number four on the US Billboard Hot 100.
model-index:
- name: SentenceTransformer based on microsoft/deberta-v3-small
  results:
  - task:
      type: semantic-similarity
      name: Semantic Similarity
    dataset:
      name: sts test
      type: sts-test
    metrics:
    - type: pearson_cosine
      value: 0.846466596875356
      name: Pearson Cosine
    - type: spearman_cosine
      value: 0.8904026640082261
      name: Spearman Cosine
    - type: pearson_manhattan
      value: 0.8784788599905751
      name: Pearson Manhattan
    - type: spearman_manhattan
      value: 0.8827673246412652
      name: Spearman Manhattan
    - type: pearson_euclidean
      value: 0.8775593452462649
      name: Pearson Euclidean
    - type: spearman_euclidean
      value: 0.8810458375755512
      name: Spearman Euclidean
    - type: pearson_dot
      value: 0.8480375809921659
      name: Pearson Dot
    - type: spearman_dot
      value: 0.8705808575512824
      name: Spearman Dot
    - type: pearson_max
      value: 0.8784788599905751
      name: Pearson Max
    - type: spearman_max
      value: 0.8904026640082261
      name: Spearman Max
  - task:
      type: triplet
      name: Triplet
    dataset:
      name: NLI v2
      type: NLI-v2
    metrics:
    - type: cosine_accuracy
      value: 1.0
      name: Cosine Accuracy
    - type: dot_accuracy
      value: 0.0
      name: Dot Accuracy
    - type: manhattan_accuracy
      value: 1.0
      name: Manhattan Accuracy
    - type: euclidean_accuracy
      value: 1.0
      name: Euclidean Accuracy
    - type: max_accuracy
      value: 1.0
      name: Max Accuracy
  - task:
      type: binary-classification
      name: Binary Classification
    dataset:
      name: VitaminC
      type: VitaminC
    metrics:
    - type: cosine_accuracy
      value: 0.578125
      name: Cosine Accuracy
    - type: cosine_accuracy_threshold
      value: 0.787956714630127
      name: Cosine Accuracy Threshold
    - type: cosine_f1
      value: 0.6577540106951871
      name: Cosine F1
    - type: cosine_f1_threshold
      value: 0.2885514795780182
      name: Cosine F1 Threshold
    - type: cosine_precision
      value: 0.4900398406374502
      name: Cosine Precision
    - type: cosine_recall
      value: 1.0
      name: Cosine Recall
    - type: cosine_ap
      value: 0.5535321831241837
      name: Cosine Ap
    - type: dot_accuracy
      value: 0.5703125
      name: Dot Accuracy
    - type: dot_accuracy_threshold
      value: 331.16162109375
      name: Dot Accuracy Threshold
    - type: dot_f1
      value: 0.6577540106951871
      name: Dot F1
    - type: dot_f1_threshold
      value: 112.65699005126953
      name: Dot F1 Threshold
    - type: dot_precision
      value: 0.4900398406374502
      name: Dot Precision
    - type: dot_recall
      value: 1.0
      name: Dot Recall
    - type: dot_ap
      value: 0.5538572188565111
      name: Dot Ap
    - type: manhattan_accuracy
      value: 0.57421875
      name: Manhattan Accuracy
    - type: manhattan_accuracy_threshold
      value: 273.09307861328125
      name: Manhattan Accuracy Threshold
    - type: manhattan_f1
      value: 0.6577540106951871
      name: Manhattan F1
    - type: manhattan_f1_threshold
      value: 495.97412109375
      name: Manhattan F1 Threshold
    - type: manhattan_precision
      value: 0.4900398406374502
      name: Manhattan Precision
    - type: manhattan_recall
      value: 1.0
      name: Manhattan Recall
    - type: manhattan_ap
      value: 0.5467997618408617
      name: Manhattan Ap
    - type: euclidean_accuracy
      value: 0.58203125
      name: Euclidean Accuracy
    - type: euclidean_accuracy_threshold
      value: 13.288713455200195
      name: Euclidean Accuracy Threshold
    - type: euclidean_f1
      value: 0.6594594594594595
      name: Euclidean F1
    - type: euclidean_f1_threshold
      value: 22.802650451660156
      name: Euclidean F1 Threshold
    - type: euclidean_precision
      value: 0.4939271255060729
      name: Euclidean Precision
    - type: euclidean_recall
      value: 0.991869918699187
      name: Euclidean Recall
    - type: euclidean_ap
      value: 0.5502971665088517
      name: Euclidean Ap
    - type: max_accuracy
      value: 0.58203125
      name: Max Accuracy
    - type: max_accuracy_threshold
      value: 331.16162109375
      name: Max Accuracy Threshold
    - type: max_f1
      value: 0.6594594594594595
      name: Max F1
    - type: max_f1_threshold
      value: 495.97412109375
      name: Max F1 Threshold
    - type: max_precision
      value: 0.4939271255060729
      name: Max Precision
    - type: max_recall
      value: 1.0
      name: Max Recall
    - type: max_ap
      value: 0.5538572188565111
      name: Max Ap
---

# SentenceTransformer based on microsoft/deberta-v3-small

This is a [sentence-transformers](https://www.SBERT.net) model finetuned from [microsoft/deberta-v3-small](https://huggingface.co/microsoft/deberta-v3-small) on the [negation-triplets](https://huggingface.co/datasets/jinaai/negation-dataset-v2), [vitaminc-pairs](https://huggingface.co/datasets/tals/vitaminc), [scitail-pairs-qa](https://huggingface.co/datasets/allenai/scitail), [scitail-pairs-pos](https://huggingface.co/datasets/allenai/scitail), xsum-pairs, [sciq_pairs](https://huggingface.co/datasets/allenai/sciq), [qasc_pairs](https://huggingface.co/datasets/allenai/qasc), openbookqa_pairs, [msmarco_pairs](https://huggingface.co/datasets/sentence-transformers/msmarco-msmarco-distilbert-base-v3), [nq_pairs](https://huggingface.co/datasets/sentence-transformers/natural-questions), [trivia_pairs](https://huggingface.co/datasets/sentence-transformers/trivia-qa), [gooaq_pairs](https://huggingface.co/datasets/sentence-transformers/gooaq) and [paws-pos](https://huggingface.co/datasets/google-research-datasets/paws) datasets. It maps sentences & paragraphs to a 768-dimensional dense vector space and can be used for semantic textual similarity, semantic search, paraphrase mining, text classification, clustering, and more.

## Model Details

### Model Description
- **Model Type:** Sentence Transformer
- **Base model:** [microsoft/deberta-v3-small](https://huggingface.co/microsoft/deberta-v3-small) <!-- at revision a36c739020e01763fe789b4b85e2df55d6180012 -->
- **Maximum Sequence Length:** 512 tokens
- **Output Dimensionality:** 768 dimensions
- **Similarity Function:** Cosine Similarity
- **Training Datasets:**
    - [negation-triplets](https://huggingface.co/datasets/jinaai/negation-dataset-v2)
    - [vitaminc-pairs](https://huggingface.co/datasets/tals/vitaminc)
    - [scitail-pairs-qa](https://huggingface.co/datasets/allenai/scitail)
    - [scitail-pairs-pos](https://huggingface.co/datasets/allenai/scitail)
    - xsum-pairs
    - [sciq_pairs](https://huggingface.co/datasets/allenai/sciq)
    - [qasc_pairs](https://huggingface.co/datasets/allenai/qasc)
    - openbookqa_pairs
    - [msmarco_pairs](https://huggingface.co/datasets/sentence-transformers/msmarco-msmarco-distilbert-base-v3)
    - [nq_pairs](https://huggingface.co/datasets/sentence-transformers/natural-questions)
    - [trivia_pairs](https://huggingface.co/datasets/sentence-transformers/trivia-qa)
    - [gooaq_pairs](https://huggingface.co/datasets/sentence-transformers/gooaq)
    - [paws-pos](https://huggingface.co/datasets/google-research-datasets/paws)
- **Language:** en
<!-- - **License:** Unknown -->

### Model Sources

- **Documentation:** [Sentence Transformers Documentation](https://sbert.net)
- **Repository:** [Sentence Transformers on GitHub](https://github.com/UKPLab/sentence-transformers)
- **Hugging Face:** [Sentence Transformers on Hugging Face](https://huggingface.co/models?library=sentence-transformers)

### Full Model Architecture

```
SentenceTransformer(
  (0): Transformer({'max_seq_length': 512, 'do_lower_case': False}) with Transformer model: DebertaV2Model 
  (1): Pooling({'word_embedding_dimension': 768, 'pooling_mode_cls_token': False, 'pooling_mode_mean_tokens': True, 'pooling_mode_max_tokens': False, 'pooling_mode_mean_sqrt_len_tokens': False, 'pooling_mode_weightedmean_tokens': False, 'pooling_mode_lasttoken': False, 'include_prompt': True})
)
```
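
This module stack can be inspected directly after loading the model; a minimal sketch:

```python
from sentence_transformers import SentenceTransformer

model = SentenceTransformer("bobox/DeBERTa-small-ST-v1-toytest")
print(model)                                     # the module stack shown above
print(model.max_seq_length)                      # 512
print(model.get_sentence_embedding_dimension())  # 768
```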

## Usage

### Direct Usage (Sentence Transformers)

First install the Sentence Transformers library:

```bash
pip install -U sentence-transformers
```

Then you can load this model and run inference.
```python
from sentence_transformers import SentenceTransformer

# Download from the 🤗 Hub
model = SentenceTransformer("bobox/DeBERTa-small-ST-v1-toytest")
# Run inference
sentences = [
    'who did ben assault in home and away',
    "List of Home and Away characters (2017) Ben and Maggie learn that Ziggy has been dating Brody in secret and they disapprove of the relationship. Ziggy leaves the house and Ben tells her not to come back. He apologises to her the next day, but does not accept her relationship with Brody, so Ziggy refuses to come home. Brody later breaks up with her. Ben begins making surf boards to sell at the pier. Ben finds Coco convulsing in the garden and he and Maggie learn she has bulimia. Ziggy later leaves home. Days later, Ben sees her with Brody, who is attempting to bring her home, and punches him in the face. Olivia Fraser Richards (Raechelle Banno) tells Sergeant Phillip McCarthy (Nicholas Cassim) and Ben is arrested. McCarthy and Kat Chapman (Pia Miller) learns he has a criminal record for assaulting his brother. Ben insults Kat, which leads to him being charged. Maggie secures a loan to get him out on bail. Maggie's mother, Diana (Sarah Chadwick) came to Summer Bay to visit the family and Diana told Ben and Maggie that she is the one who bailed Ben out of jail.",
    'Bad Things (Machine Gun Kelly and Camila Cabello song) "Bad Things" is a song by American rapper Machine Gun Kelly and Cuban-American singer Camila Cabello. The song was released on October 14, 2016 and was produced by The Futuristics. Its music video was directed by Hannah Lux Davis and premiered on December 1, 2016. The song features an interpolation of Fastball\'s 1999 single "Out of My Head". The single peaked at number four on the US Billboard Hot 100.',
]
embeddings = model.encode(sentences)
print(embeddings.shape)
# [3, 768]

# Get the similarity scores for the embeddings
similarities = model.similarity(embeddings, embeddings)
print(similarities.shape)
# [3, 3]
```

<!--
### Direct Usage (Transformers)

<details><summary>Click to see the direct usage in Transformers</summary>

</details>
-->

<!--
### Downstream Usage (Sentence Transformers)

You can finetune this model on your own dataset.

<details><summary>Click to expand</summary>

</details>
-->

<!--
### Out-of-Scope Use

*List how the model may foreseeably be misused and address what users ought not to do with the model.*
-->

## Evaluation

### Metrics

#### Semantic Similarity
* Dataset: `sts-test`
* Evaluated with [<code>EmbeddingSimilarityEvaluator</code>](https://sbert.net/docs/package_reference/sentence_transformer/evaluation.html#sentence_transformers.evaluation.EmbeddingSimilarityEvaluator)

| Metric              | Value      |
|:--------------------|:-----------|
| pearson_cosine      | 0.8465     |
| **spearman_cosine** | **0.8904** |
| pearson_manhattan   | 0.8785     |
| spearman_manhattan  | 0.8828     |
| pearson_euclidean   | 0.8776     |
| spearman_euclidean  | 0.881      |
| pearson_dot         | 0.848      |
| spearman_dot        | 0.8706     |
| pearson_max         | 0.8785     |
| spearman_max        | 0.8904     |
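
A minimal sketch of how an evaluation like this can be run with the evaluator linked above; the sentence pairs and gold scores below are illustrative placeholders, not the actual STS test split:

```python
from sentence_transformers import SentenceTransformer
from sentence_transformers.evaluation import EmbeddingSimilarityEvaluator

model = SentenceTransformer("bobox/DeBERTa-small-ST-v1-toytest")

# Placeholder pairs with gold similarity scores in [0, 1]; the table above
# was computed on the real STS test split, not on these toy examples.
evaluator = EmbeddingSimilarityEvaluator(
    sentences1=["A man is playing a guitar.", "A dog runs outside."],
    sentences2=["Someone plays a guitar.", "A cat sleeps indoors."],
    scores=[0.9, 0.1],
    name="sts-test",
)
results = evaluator(model)  # Pearson/Spearman over cosine, dot, Euclidean, Manhattan
```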

#### Triplet
* Dataset: `NLI-v2`
* Evaluated with [<code>TripletEvaluator</code>](https://sbert.net/docs/package_reference/sentence_transformer/evaluation.html#sentence_transformers.evaluation.TripletEvaluator)

| Metric             | Value   |
|:-------------------|:--------|
| cosine_accuracy    | 1.0     |
| dot_accuracy       | 0.0     |
| manhattan_accuracy | 1.0     |
| euclidean_accuracy | 1.0     |
| **max_accuracy**   | **1.0** |
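
The same pattern applies to the triplet metric; a minimal sketch with placeholder triplets (the reported accuracies come from the NLI-v2 evaluation split):

```python
from sentence_transformers import SentenceTransformer
from sentence_transformers.evaluation import TripletEvaluator

model = SentenceTransformer("bobox/DeBERTa-small-ST-v1-toytest")

# Placeholder triplet: accuracy is the fraction of triplets where the anchor
# embeds closer to the positive than to the negative.
evaluator = TripletEvaluator(
    anchors=["A woman is reading a book."],
    positives=["A person looks at a book."],
    negatives=["Nobody is reading anything."],
    name="NLI-v2",
)
evaluator(model)
```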

#### Binary Classification
* Dataset: `VitaminC`
* Evaluated with [<code>BinaryClassificationEvaluator</code>](https://sbert.net/docs/package_reference/sentence_transformer/evaluation.html#sentence_transformers.evaluation.BinaryClassificationEvaluator)

| Metric                       | Value      |
|:-----------------------------|:-----------|
| cosine_accuracy              | 0.5781     |
| cosine_accuracy_threshold    | 0.788      |
| cosine_f1                    | 0.6578     |
| cosine_f1_threshold          | 0.2886     |
| cosine_precision             | 0.49       |
| cosine_recall                | 1.0        |
| cosine_ap                    | 0.5535     |
| dot_accuracy                 | 0.5703     |
| dot_accuracy_threshold       | 331.1616   |
| dot_f1                       | 0.6578     |
| dot_f1_threshold             | 112.657    |
| dot_precision                | 0.49       |
| dot_recall                   | 1.0        |
| dot_ap                       | 0.5539     |
| manhattan_accuracy           | 0.5742     |
| manhattan_accuracy_threshold | 273.0931   |
| manhattan_f1                 | 0.6578     |
| manhattan_f1_threshold       | 495.9741   |
| manhattan_precision          | 0.49       |
| manhattan_recall             | 1.0        |
| manhattan_ap                 | 0.5468     |
| euclidean_accuracy           | 0.582      |
| euclidean_accuracy_threshold | 13.2887    |
| euclidean_f1                 | 0.6595     |
| euclidean_f1_threshold       | 22.8027    |
| euclidean_precision          | 0.4939     |
| euclidean_recall             | 0.9919     |
| euclidean_ap                 | 0.5503     |
| max_accuracy                 | 0.582      |
| max_accuracy_threshold       | 331.1616   |
| max_f1                       | 0.6595     |
| max_f1_threshold             | 495.9741   |
| max_precision                | 0.4939     |
| max_recall                   | 1.0        |
| **max_ap**                   | **0.5539** |
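
And for the binary-classification metric, a minimal sketch with placeholder pairs labeled 1 (similar) or 0 (dissimilar); the table above was computed on pairs derived from VitaminC:

```python
from sentence_transformers import SentenceTransformer
from sentence_transformers.evaluation import BinaryClassificationEvaluator

model = SentenceTransformer("bobox/DeBERTa-small-ST-v1-toytest")

# Placeholder pairs; the evaluator searches for the best decision threshold
# per similarity metric and reports accuracy, F1, precision, recall and AP.
evaluator = BinaryClassificationEvaluator(
    sentences1=["The film scored 47% on Metacritic.", "The film was a hit."],
    sentences2=["The film currently holds a 47% on Metacritic.", "Reviews were mixed."],
    labels=[1, 0],
    name="VitaminC",
)
evaluator(model)
```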

<!--
## Bias, Risks and Limitations

*What are the known or foreseeable issues stemming from this model? You could also flag here known failure cases or weaknesses of the model.*
-->

<!--
### Recommendations

*What are recommendations with respect to the foreseeable issues? For example, filtering explicit content.*
-->

## Training Details

### Training Datasets

#### negation-triplets

* Dataset: [negation-triplets](https://huggingface.co/datasets/jinaai/negation-dataset-v2)
* Size: 26,000 training samples
* Columns: <code>anchor</code>, <code>entailment</code>, and <code>negative</code>
* Approximate statistics based on the first 1000 samples:
  |         | anchor                                                                             | entailment                                                                        | negative                                                                          |
  |:--------|:-----------------------------------------------------------------------------------|:----------------------------------------------------------------------------------|:----------------------------------------------------------------------------------|
  | type    | string                                                                             | string                                                                            | string                                                                            |
  | details | <ul><li>min: 5 tokens</li><li>mean: 22.32 tokens</li><li>max: 124 tokens</li></ul> | <ul><li>min: 4 tokens</li><li>mean: 14.05 tokens</li><li>max: 42 tokens</li></ul> | <ul><li>min: 4 tokens</li><li>mean: 14.36 tokens</li><li>max: 42 tokens</li></ul> |
* Samples:
  | anchor                                                                                                                                                                                                               | entailment                                                                                                               | negative                                                                                                              |
  |:---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|:-------------------------------------------------------------------------------------------------------------------------|:----------------------------------------------------------------------------------------------------------------------|
  | <code>Braşov is part of the Transylvania area .</code> | <code>Like many other cities in Transylvania , Braşov is also home for a significant ethnic Hungarian minority .</code> | <code>Like many other cities in Transylvania, Braşov is also home for a significant ethnic Romanian majority.</code> |
  | <code>If some of the principles of supersymmetry are correct , it is possible to recreate these superparticles with particle accelerators . This attempt could prove or disprove the ideas of supersymmetry .</code> | <code>It is possible to have more than one kind of supersymmetry transformation .</code>                                 | <code>It is impossible to find even one kind of supersymmetry transformation.</code>                                  |
  | <code>A group of people running a bicycle race past a red building.</code>                                                                                                                                           | <code>A bunch of people run past a building</code>                                                                       | <code>A few people stay still near a building</code>                                                                  |
* Loss: [<code>CachedGISTEmbedLoss</code>](https://sbert.net/docs/package_reference/sentence_transformer/losses.html#cachedgistembedloss) with these parameters:
  ```json
  {'guide': SentenceTransformer(
    (0): Transformer({'max_seq_length': 512, 'do_lower_case': False}) with Transformer model: BertModel 
    (1): Pooling({'word_embedding_dimension': 768, 'pooling_mode_cls_token': True, 'pooling_mode_mean_tokens': False, 'pooling_mode_max_tokens': False, 'pooling_mode_mean_sqrt_len_tokens': False, 'pooling_mode_weightedmean_tokens': False, 'pooling_mode_lasttoken': False, 'include_prompt': True})
    (2): Normalize()
  ), 'temperature': 0.05}
  ```
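
  A minimal sketch of how a loss with these parameters can be constructed; the guide checkpoint below is an assumption for illustration, since the card only shows the guide's architecture (a BERT encoder with CLS pooling and an L2-normalize step), not its name:

  ```python
  from sentence_transformers import SentenceTransformer
  from sentence_transformers.losses import CachedGISTEmbedLoss

  model = SentenceTransformer("microsoft/deberta-v3-small")

  # Assumed guide checkpoint: any BERT-based embedder with CLS pooling and a
  # Normalize module matches the guide architecture printed above.
  guide = SentenceTransformer("BAAI/bge-base-en-v1.5")

  loss = CachedGISTEmbedLoss(model, guide=guide, temperature=0.05)
  ```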

#### vitaminc-pairs

* Dataset: [vitaminc-pairs](https://huggingface.co/datasets/tals/vitaminc) at [be6febb](https://huggingface.co/datasets/tals/vitaminc/tree/be6febb761b0b2807687e61e0b5282e459df2fa0)
* Size: 24,000 training samples
* Columns: <code>claim</code> and <code>evidence</code>
* Approximate statistics based on the first 1000 samples:
  |         | claim                                                                             | evidence                                                                           |
  |:--------|:----------------------------------------------------------------------------------|:-----------------------------------------------------------------------------------|
  | type    | string                                                                            | string                                                                             |
  | details | <ul><li>min: 7 tokens</li><li>mean: 17.43 tokens</li><li>max: 67 tokens</li></ul> | <ul><li>min: 8 tokens</li><li>mean: 37.41 tokens</li><li>max: 366 tokens</li></ul> |
* Samples:
  | claim                                                                                                                                                  | evidence                                                                                                                                                                                                                                   |
  |:-------------------------------------------------------------------------------------------------------------------------------------------------------|:-------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|
  | <code>By March 2016 , Baby was above the 8th most viewed YouTube video .</code>                                                                        | <code>On March 5 , 2014 , `` Baby '' became the second video , after `` Gangnam Style '' , to receive 1 billion views on YouTube , and is the ninth most viewed video on the site , with over 1.33 billion views as of March 2016 .</code> |
  | <code>The movie Think Like A Man had a rating of less than 50 % on Metacritic .</code>                                                                 | <code>Early reviews for the film were mixed , the film currently holds a 47 % on Metacritic , indicating `` mixed or average reviews '' .</code>                                                                                           |
  | <code>Animal consumption at the Huanan Seafood Market is suspected to be where the severe acute respiratory syndrome coronavirus 2 originated .</code> | <code>Animals sold for food are suspected to be the reservoir because many of first identified infected individuals were workers at the Huanan Seafood Market .</code>                                                                     |
* Loss: [<code>CachedGISTEmbedLoss</code>](https://sbert.net/docs/package_reference/sentence_transformer/losses.html#cachedgistembedloss) with these parameters:
  ```json
  {'guide': SentenceTransformer(
    (0): Transformer({'max_seq_length': 512, 'do_lower_case': False}) with Transformer model: BertModel 
    (1): Pooling({'word_embedding_dimension': 768, 'pooling_mode_cls_token': True, 'pooling_mode_mean_tokens': False, 'pooling_mode_max_tokens': False, 'pooling_mode_mean_sqrt_len_tokens': False, 'pooling_mode_weightedmean_tokens': False, 'pooling_mode_lasttoken': False, 'include_prompt': True})
    (2): Normalize()
  ), 'temperature': 0.05}
  ```

#### scitail-pairs-qa

* Dataset: [scitail-pairs-qa](https://huggingface.co/datasets/allenai/scitail) at [0cc4353](https://huggingface.co/datasets/allenai/scitail/tree/0cc4353235b289165dfde1c7c5d1be983f99ce44)
* Size: 14,237 training samples
* Columns: <code>sentence2</code> and <code>sentence1</code>
* Approximate statistics based on the first 1000 samples:
  |         | sentence2                                                                         | sentence1                                                                         |
  |:--------|:----------------------------------------------------------------------------------|:----------------------------------------------------------------------------------|
  | type    | string                                                                            | string                                                                            |
  | details | <ul><li>min: 7 tokens</li><li>mean: 16.12 tokens</li><li>max: 41 tokens</li></ul> | <ul><li>min: 7 tokens</li><li>mean: 15.23 tokens</li><li>max: 41 tokens</li></ul> |
* Samples:
  | sentence2                                                                                                                                    | sentence1                                                                                                                          |
  |:---------------------------------------------------------------------------------------------------------------------------------------------|:-----------------------------------------------------------------------------------------------------------------------------------|
  | <code>Instruments that measure the angle of the slope of a volcano are called tilt meters.</code>                                            | <code>Instruments that measure the angle of the slope of a volcano are called what?</code>                                         |
  | <code>Ultrasound, a diagnostic technology, uses high-frequency vibrations transmitted into any tissue in contact with the transducer.</code> | <code>What diagnostic technology uses high-frequency vibrations transmitted into any tissue in contact with the transducer?</code> |
  | <code>Many species of birds in new england fly south for the winter months to find an environment with more food.</code>                     | <code>Which of the following best explains why many species of birds in New England fly south for the winter months?</code>        |
* Loss: [<code>CachedGISTEmbedLoss</code>](https://sbert.net/docs/package_reference/sentence_transformer/losses.html#cachedgistembedloss) with these parameters:
  ```json
  {'guide': SentenceTransformer(
    (0): Transformer({'max_seq_length': 512, 'do_lower_case': False}) with Transformer model: BertModel 
    (1): Pooling({'word_embedding_dimension': 768, 'pooling_mode_cls_token': True, 'pooling_mode_mean_tokens': False, 'pooling_mode_max_tokens': False, 'pooling_mode_mean_sqrt_len_tokens': False, 'pooling_mode_weightedmean_tokens': False, 'pooling_mode_lasttoken': False, 'include_prompt': True})
    (2): Normalize()
  ), 'temperature': 0.05}
  ```

#### scitail-pairs-pos

* Dataset: [scitail-pairs-pos](https://huggingface.co/datasets/allenai/scitail) at [0cc4353](https://huggingface.co/datasets/allenai/scitail/tree/0cc4353235b289165dfde1c7c5d1be983f99ce44)
* Size: 8,600 training samples
* Columns: <code>sentence1</code> and <code>sentence2</code>
* Approximate statistics based on the first 1000 samples:
  |         | sentence1                                                                         | sentence2                                                                         |
  |:--------|:----------------------------------------------------------------------------------|:----------------------------------------------------------------------------------|
  | type    | string                                                                            | string                                                                            |
  | details | <ul><li>min: 8 tokens</li><li>mean: 23.36 tokens</li><li>max: 74 tokens</li></ul> | <ul><li>min: 7 tokens</li><li>mean: 15.79 tokens</li><li>max: 41 tokens</li></ul> |
* Samples:
  | sentence1                                                                                                                                                                                               | sentence2                                                                                                     |
  |:--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|:--------------------------------------------------------------------------------------------------------------|
  | <code>Anyway, what makes it possible for insects to walk on water is called Surface tension .</code>                                                                                                    | <code>Surface tension is responsible for the fact that small insects can walk on water.</code>                |
  | <code>Elastic potential energy is the potential energy of an elastic object (for example a bow or a catapult) that is deformed under tension or compression (or stressed in formal terminology).</code> | <code>The term elastic potential energy is used to describe potential energy due to an object’s shape.</code> |
  | <code>But, season or not, tornadoes can occur at any time of the year, if the weather conditions are right.</code>                                                                                      | <code>Tornadoes can occur in any.</code>                                                                      |
* Loss: [<code>CachedGISTEmbedLoss</code>](https://sbert.net/docs/package_reference/sentence_transformer/losses.html#cachedgistembedloss) with these parameters:
  ```json
  {'guide': SentenceTransformer(
    (0): Transformer({'max_seq_length': 512, 'do_lower_case': False}) with Transformer model: BertModel 
    (1): Pooling({'word_embedding_dimension': 768, 'pooling_mode_cls_token': True, 'pooling_mode_mean_tokens': False, 'pooling_mode_max_tokens': False, 'pooling_mode_mean_sqrt_len_tokens': False, 'pooling_mode_weightedmean_tokens': False, 'pooling_mode_lasttoken': False, 'include_prompt': True})
    (2): Normalize()
  ), 'temperature': 0.05}
  ```

#### xsum-pairs

* Dataset: xsum-pairs
* Size: 24,000 training samples
* Columns: <code>document</code> and <code>summary</code>
* Approximate statistics based on the first 1000 samples:
  |         | document                                                                             | summary                                                                           |
  |:--------|:-------------------------------------------------------------------------------------|:----------------------------------------------------------------------------------|
  | type    | string                                                                               | string                                                                            |
  | details | <ul><li>min: 45 tokens</li><li>mean: 255.53 tokens</li><li>max: 487 tokens</li></ul> | <ul><li>min: 8 tokens</li><li>mean: 25.67 tokens</li><li>max: 42 tokens</li></ul> |
* Samples:
  | document                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                               | summary                                                                                                                |
  |:-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|:-----------------------------------------------------------------------------------------------------------------------|
  | <code>Jean Galligan, 82, from Dumfries, died when her car caught fire after it was involved in a collision with a Vauxhall Zafira on 14 May.<br>Police Scotland said Mrs Galligan was driving a red Daihatsu which was burnt out as a result of the accident on the A76 at Holywood.<br>Neither the driver nor the front seat passenger in the Zafira were injured.</code>                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                             | <code>A woman killed in a road accident near Dumfries has been named by police.</code>                                 |
  | <code>Police officers carried out arrests on Thursday in connection with alleged sex offences against females which occurred between 2008 and 2015.<br>Six men were charged with the rape of a girl under 16 as well as other sexual offences, while a seventh man was charged with conspiracy to rape.<br>Six men - all from Oxford - will appear before Oxford magistrates.<br>They are: Shabir Dogar, 22; Shabaz Khan, 23; Shohab Dogar, 23; Yasin Hamid, 20; Usman Iddris, 22; and Joseph Suraina, 22.<br>Waqas Hussain, 24, of no fixed abode, will appear at Oxford Magistrates' Court on 4 April.<br>Mr Hussain has also been charged with the attempted sexual assault of a girl under 13, as have Shabir Dogar and Shohab Dogar.<br>The raids were part of what the police are calling Operation Nautical.<br>A further 10 men were also arrested on Wednesday as part of the same operation.</code>                                                                                                                                                                                                                                                                                                                                                                                                          | <code>Seven men have been charged in connection with a major child sexual exploitation investigation in Oxford.</code> |
  | <code>In February 1957, 11-year-old Moira Anderson left her grandmother's house in Coatbridge to go to the shops but never returned.<br>Bus driver and convicted paedophile Alexander Gartshore, who died in 2006, is suspected of her murder.<br>Police are now looking at an area of Monkland Canal in an attempt to find her remains.<br>Moira Anderson was last seen on 23 February 1957 when she left on an errand during a heavy snowstorm, and boarded a Baxter's bus that was driven by Gartshore.<br>Later that year, he was jailed for raping a 17-year-old babysitter.<br>In 1999, convicted child abuser James Gallogley named his former friend Gartshore as Moira's murderer.<br>Gartshore's own daughter Sandra Brown was convinced he was the killer and campaigned to have him charged.<br>In 2014 prosecutors took the unusual step of announcing that Gartshore would have faced prosecution for the schoolgirl's murder if he were still alive.<br>In 1957 a witness reported seeing a tall man carrying a large, heavy sack towards the canal the morning after Moira disappeared but the possible sighting was not followed up.<br>Four years ago a grave in Old Monkland Cemetery in Coatbridge was exhumed as part of the search but no evidence was found that Moira was buried there.</code> | <code>A new search to find a schoolgirl who disappeared 60 years in North Lanarkshire has begun.</code>                |
* Loss: [<code>CachedGISTEmbedLoss</code>](https://sbert.net/docs/package_reference/sentence_transformer/losses.html#cachedgistembedloss) with these parameters:
  ```json
  {'guide': SentenceTransformer(
    (0): Transformer({'max_seq_length': 512, 'do_lower_case': False}) with Transformer model: BertModel 
    (1): Pooling({'word_embedding_dimension': 768, 'pooling_mode_cls_token': True, 'pooling_mode_mean_tokens': False, 'pooling_mode_max_tokens': False, 'pooling_mode_mean_sqrt_len_tokens': False, 'pooling_mode_weightedmean_tokens': False, 'pooling_mode_lasttoken': False, 'include_prompt': True})
    (2): Normalize()
  ), 'temperature': 0.05}
  ```

#### sciq_pairs

* Dataset: [sciq_pairs](https://huggingface.co/datasets/allenai/sciq) at [2c94ad3](https://huggingface.co/datasets/allenai/sciq/tree/2c94ad3e1aafab77146f384e23536f97a4849815)
* Size: 11,095 training samples
* Columns: <code>sentence1</code> and <code>sentence2</code>
* Approximate statistics based on the first 1000 samples:
  |         | sentence1                                                                         | sentence2                                                                          |
  |:--------|:----------------------------------------------------------------------------------|:-----------------------------------------------------------------------------------|
  | type    | string                                                                            | string                                                                             |
  | details | <ul><li>min: 7 tokens</li><li>mean: 16.67 tokens</li><li>max: 60 tokens</li></ul> | <ul><li>min: 2 tokens</li><li>mean: 82.57 tokens</li><li>max: 512 tokens</li></ul> |
* Samples:
  | sentence1                                                                                    | sentence2                                                                                                                                                                                                                                                                                                                                                                                                                                                            |
  |:---------------------------------------------------------------------------------------------|:---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|
  | <code>The punnett square shows the possible what, and their most likely ratios?</code>       | <code>If the parents had four offspring, their most likely genotypes would be one BB, two Bb, and one bb. But the genotype ratios of their actual offspring may differ. That's because which gametes happen to unite is a matter of chance, like a coin toss. The Punnett square just shows the possible genotypes and their most likely ratios.</code>                                                                                                              |
  | <code>Which hormones work together to control the level of glucose in the blood?</code>      | <code>The pancreas is located near the stomach. Its hormones include insulin and glucagon. These two hormones work together to control the level of glucose in the blood. Insulin causes excess blood glucose to be taken up by the liver, which stores the glucose as glycogen. Glucagon stimulates the liver to break down glycogen into glucose and release it back into the blood. The pancreas also secretes digestive enzymes into the digestive tract.</code> |
  | <code>What is the“packet” of energy called that the nucleus emits during gamma decay?</code> | <code>Gamma rays are produced during gamma decay of an excited nucleus. During gamma decay, the nucleus emits a “packet” of energy called a gamma particle.</code>                                                                                                                                                                                                                                                                                                   |
* Loss: [<code>CachedGISTEmbedLoss</code>](https://sbert.net/docs/package_reference/sentence_transformer/losses.html#cachedgistembedloss) with these parameters:
  ```json
  {'guide': SentenceTransformer(
    (0): Transformer({'max_seq_length': 512, 'do_lower_case': False}) with Transformer model: BertModel 
    (1): Pooling({'word_embedding_dimension': 768, 'pooling_mode_cls_token': True, 'pooling_mode_mean_tokens': False, 'pooling_mode_max_tokens': False, 'pooling_mode_mean_sqrt_len_tokens': False, 'pooling_mode_weightedmean_tokens': False, 'pooling_mode_lasttoken': False, 'include_prompt': True})
    (2): Normalize()
  ), 'temperature': 0.05}
  ```
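
As a reproduction aid, here is a minimal sketch of loading sciq at the pinned revision and mapping it to the pair columns listed above. Using the `support` passage as `sentence2`, and dropping rows whose support is empty (the raw train split is larger than 11,095, so this filter is a guess at how that count was obtained), are both assumptions.

```python
from datasets import load_dataset

# Load the train split pinned to the revision cited above.
sciq = load_dataset(
    "allenai/sciq",
    split="train",
    revision="2c94ad3e1aafab77146f384e23536f97a4849815",
)

# Assumption: rows with an empty `support` passage are dropped, and
# (question, support) become the (sentence1, sentence2) pair columns.
pairs = sciq.filter(lambda row: row["support"].strip() != "").map(
    lambda row: {"sentence1": row["question"], "sentence2": row["support"]},
    remove_columns=sciq.column_names,
)
```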

#### qasc_pairs

* Dataset: [qasc_pairs](https://huggingface.co/datasets/allenai/qasc) at [a34ba20](https://huggingface.co/datasets/allenai/qasc/tree/a34ba204eb9a33b919c10cc08f4f1c8dae5ec070)
* Size: 7,727 training samples
* Columns: <code>sentence1</code> and <code>sentence2</code>
* Approximate statistics based on the first 1000 samples (reproducible with the sketch after this section):
  |         | sentence1                                                                         | sentence2                                                                          |
  |:--------|:----------------------------------------------------------------------------------|:-----------------------------------------------------------------------------------|
  | type    | string                                                                            | string                                                                             |
  | details | <ul><li>min: 5 tokens</li><li>mean: 11.36 tokens</li><li>max: 22 tokens</li></ul> | <ul><li>min: 14 tokens</li><li>mean: 34.66 tokens</li><li>max: 66 tokens</li></ul> |
* Samples:
  | sentence1                                                             | sentence2                                                                                                                                                                                                           |
  |:----------------------------------------------------------------------|:--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|
  | <code>Orbiting spacecraft reentering the Earth's atmosphere</code>    | <code>friction causes an object to lose energy. Dear Ashlee, The heat in the reentry phase is due to friction between the spacecraft and the air.. Spacecraft that are reentering the atmosphere lose energy</code> |
  | <code>The chance of you developing cancer depends most on your</code> | <code>Cancer genes can be inherited.. Genes are inherited from parents.. Developing cancer can depend on your parents</code>                                                                                        |
  | <code>Why does a snake look for shelter in the winter?</code>         | <code>shelter is used for protection by animals against weather. Many snakes seek shelter from the winter weather by holding up in dens.. Snakes use shelter to protect themselves in the winter</code>             |
* Loss: [<code>CachedGISTEmbedLoss</code>](https://sbert.net/docs/package_reference/sentence_transformer/losses.html#cachedgistembedloss) with these parameters:
  ```json
  {'guide': SentenceTransformer(
    (0): Transformer({'max_seq_length': 512, 'do_lower_case': False}) with Transformer model: BertModel 
    (1): Pooling({'word_embedding_dimension': 768, 'pooling_mode_cls_token': True, 'pooling_mode_mean_tokens': False, 'pooling_mode_max_tokens': False, 'pooling_mode_mean_sqrt_len_tokens': False, 'pooling_mode_weightedmean_tokens': False, 'pooling_mode_lasttoken': False, 'include_prompt': True})
    (2): Normalize()
  ), 'temperature': 0.05}
  ```
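
The "approximate statistics" rows throughout this card can be reproduced by tokenizing the first 1000 samples and taking min/mean/max token counts per column. A sketch, reusing `pairs` from the sciq example above (any of the pair datasets works the same way; the tokenizer checkpoint is a placeholder):

```python
from statistics import mean

from transformers import AutoTokenizer

# Placeholder: in practice you would use the trained model's own tokenizer.
tok = AutoTokenizer.from_pretrained("bert-base-uncased")

def token_stats(texts, max_length=512):
    """Min/mean/max token counts, truncated at the model's max_seq_length."""
    lengths = [
        len(tok(t, truncation=True, max_length=max_length)["input_ids"])
        for t in texts
    ]
    return min(lengths), mean(lengths), max(lengths)

# `pairs` comes from the sciq loading sketch earlier in this card.
sample = pairs.select(range(min(1000, len(pairs))))
for column in ("sentence1", "sentence2"):
    lo, avg, hi = token_stats(sample[column])
    print(f"{column}: min={lo}, mean={avg:.2f}, max={hi}")
```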

#### openbookqa_pairs

* Dataset: openbookqa_pairs (one possible construction is sketched after this section)
* Size: 4,522 training samples
* Columns: <code>question</code> and <code>fact</code>
* Approximate statistics based on the first 1000 samples:
  |         | question                                                                         | fact                                                                             |
  |:--------|:---------------------------------------------------------------------------------|:---------------------------------------------------------------------------------|
  | type    | string                                                                           | string                                                                           |
  | details | <ul><li>min: 3 tokens</li><li>mean: 13.8 tokens</li><li>max: 78 tokens</li></ul> | <ul><li>min: 4 tokens</li><li>mean: 11.5 tokens</li><li>max: 30 tokens</li></ul> |
* Samples:
  | question                                                                     | fact                                                                                  |
  |:-----------------------------------------------------------------------------|:--------------------------------------------------------------------------------------|
  | <code>What is animal competition?</code>                                     | <code>if two animals eat the same prey then those animals compete for that pey</code> |
  | <code>If you wanted to make a metal bed frame, where would you start?</code> | <code>alloys are made of two or more metals</code>                                    |
  | <code>Places lacking warmth have few what</code>                             | <code>cold environments contain few organisms</code>                                  |
* Loss: [<code>CachedGISTEmbedLoss</code>](https://sbert.net/docs/package_reference/sentence_transformer/losses.html#cachedgistembedloss) with these parameters:
  ```json
  {'guide': SentenceTransformer(
    (0): Transformer({'max_seq_length': 512, 'do_lower_case': False}) with Transformer model: BertModel 
    (1): Pooling({'word_embedding_dimension': 768, 'pooling_mode_cls_token': True, 'pooling_mode_mean_tokens': False, 'pooling_mode_max_tokens': False, 'pooling_mode_mean_sqrt_len_tokens': False, 'pooling_mode_weightedmean_tokens': False, 'pooling_mode_lasttoken': False, 'include_prompt': True})
    (2): Normalize()
  ), 'temperature': 0.05}
  ```
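
Unlike the other sets, openbookqa_pairs carries no source link. One plausible construction, an assumption not confirmed by the card, pairs each question stem with the supporting fact from the "additional" configuration of allenai/openbookqa:

```python
from datasets import load_dataset

# Assumption: the "additional" config of allenai/openbookqa exposes a
# `fact1` field alongside each question stem; the 4,522 figure above
# suggests some further filtering that this card does not record.
obqa = load_dataset("allenai/openbookqa", "additional", split="train")
qa_facts = obqa.map(
    lambda row: {"question": row["question_stem"], "fact": row["fact1"]},
    remove_columns=obqa.column_names,
)
```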

#### msmarco_pairs

* Dataset: [msmarco_pairs](https://huggingface.co/datasets/sentence-transformers/msmarco-msmarco-distilbert-base-v3) at [28ff31e](https://huggingface.co/datasets/sentence-transformers/msmarco-msmarco-distilbert-base-v3/tree/28ff31e4c97cddd53d298497f766e653f1e666f9)
* Size: 22,000 training samples
* Columns: <code>sentence1</code> and <code>sentence2</code>
* Approximate statistics based on the first 1000 samples:
  |         | sentence1                                                                        | sentence2                                                                           |
  |:--------|:---------------------------------------------------------------------------------|:------------------------------------------------------------------------------------|
  | type    | string                                                                           | string                                                                              |
  | details | <ul><li>min: 4 tokens</li><li>mean: 8.75 tokens</li><li>max: 38 tokens</li></ul> | <ul><li>min: 14 tokens</li><li>mean: 76.85 tokens</li><li>max: 201 tokens</li></ul> |
* Samples:
  | sentence1                                                       | sentence2                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                      |
  |:----------------------------------------------------------------|:-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|
  | <code>what is hydrolysis in digestion</code>                    | <code>Digestion and Hydrolysis The digestion process relies upon hydrolysis to render the biochemical reactions that break down food. The digestive tract secretes enzymes, such as proteases, carbohydrases, nucleases and lipases that, along with water, catalyze the hydrolysis that releases various nutrients.</code>                                                                                                                                                                                                                                                                                                                    |
  | <code>is cartier a good watch</code>                            | <code>CARTIER WATCHES. AuthenticWatches.com is one of the largest Internet Dealers for authentic Cartier watches. Cartier watches have no equal with respect to elegance and luxury. The name Cartier is synonymous with exquisite luxury and quality. Founded in 1847 by Louis-Fran ois Cartier, Cartier has led the industry in jewelry and watches alike. Cartier watches boast a large variety of design and functionality, yet maintain the utmost quality and sophistication in every series.</code>                                                                                                                                     |
  | <code>what vitamin is a precursor for a neurotransmitter</code> | <code>Tryptophan is an essential amino acid which is the precursor of serotonin. Serotonin is a brain neurotransmitter, platelet clotting factor and neurohormone found in organs throughout the body. Metabolism of tryptophan to serotonin requires nutrients such as vitamin B6, niacin and glutathione.ower doses, as little as 1000 to 2000 mg, have been found to be effective clinically, as well as experimentally in animals. The minimum daily requirement for adults of tyrosine and its precursor, phenylalanine, is 16 mg/kg a day or about 1000 mg total. Hence, 6 g is at least six times the minimum daily requirement.</code> |
* Loss: [<code>CachedGISTEmbedLoss</code>](https://sbert.net/docs/package_reference/sentence_transformer/losses.html#cachedgistembedloss) with these parameters:
  ```json
  {'guide': SentenceTransformer(
    (0): Transformer({'max_seq_length': 512, 'do_lower_case': False}) with Transformer model: BertModel 
    (1): Pooling({'word_embedding_dimension': 768, 'pooling_mode_cls_token': True, 'pooling_mode_mean_tokens': False, 'pooling_mode_max_tokens': False, 'pooling_mode_mean_sqrt_len_tokens': False, 'pooling_mode_weightedmean_tokens': False, 'pooling_mode_lasttoken': False, 'include_prompt': True})
    (2): Normalize()
  ), 'temperature': 0.05}
  ```

#### nq_pairs

* Dataset: [nq_pairs](https://huggingface.co/datasets/sentence-transformers/natural-questions) at [f9e894e](https://huggingface.co/datasets/sentence-transformers/natural-questions/tree/f9e894e1081e206e577b4eaa9ee6de2b06ae6f17)
* Size: 22,000 training samples (a subsampling sketch follows this section)
* Columns: <code>sentence1</code> and <code>sentence2</code>
* Approximate statistics based on the first 1000 samples:
  |         | sentence1                                                                          | sentence2                                                                            |
  |:--------|:-----------------------------------------------------------------------------------|:-------------------------------------------------------------------------------------|
  | type    | string                                                                             | string                                                                               |
  | details | <ul><li>min: 10 tokens</li><li>mean: 11.92 tokens</li><li>max: 27 tokens</li></ul> | <ul><li>min: 15 tokens</li><li>mean: 132.65 tokens</li><li>max: 512 tokens</li></ul> |
* Samples:
  | sentence1                                                       | sentence2                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                           |
  |:----------------------------------------------------------------|:--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|
  | <code>friends episode with the turkey on the head</code>        | <code>The One with All the Thanksgivings "The One with All the Thanksgivings" (also known as "The One with the Thanksgiving Flashbacks"[2]) is the eighth episode of the fifth season of Friends. It first aired on the NBC network in the United States on November 19, 1998. In the episode, the main characters spend Thanksgiving at Monica's (Courteney Cox) apartment and begin telling stories about their worst Thanksgivings: Chandler (Matthew Perry) learning of his parents' divorce, Phoebe (Lisa Kudrow) losing arms in past lives and Joey (Matt LeBlanc) having his head stuck in a turkey. Rachel (Jennifer Aniston) reveals Monica's worst Thanksgiving—accidentally cutting off Chandler's toe after he called her "fat" in their first encounter. When Monica begs Chandler to forgive her, he accidentally reveals that he loves her.</code> |
  | <code>who played the first buford pusser in walking tall</code> | <code>Walking Tall (1973 film) Buford Pusser (Joe Don Baker), at his wife Pauline's (Elizabeth Hartman) behest, retires from the professional wrestling ring and moves back to Tennessee to start a logging business with his father, Carl Pusser (Noah Beery, Jr.).</code>                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                         |
  | <code>when did the us let go of the philippines</code>          | <code>History of the Philippines (1898–1946) The history of the Philippines from 1898 to 1946 covers the period of American rule in the Philippines and began with the outbreak of the Spanish–American War in April 1898, when the Philippines was still part of the Spanish East Indies, and concluded when the United States formally recognised the independence of the Republic of the Philippines on July 4, 1946.</code>                                                                                                                                                                                                                                                                                                                                                                                                                                     |
* Loss: [<code>CachedGISTEmbedLoss</code>](https://sbert.net/docs/package_reference/sentence_transformer/losses.html#cachedgistembedloss) with these parameters:
  ```json
  {'guide': SentenceTransformer(
    (0): Transformer({'max_seq_length': 512, 'do_lower_case': False}) with Transformer model: BertModel 
    (1): Pooling({'word_embedding_dimension': 768, 'pooling_mode_cls_token': True, 'pooling_mode_mean_tokens': False, 'pooling_mode_max_tokens': False, 'pooling_mode_mean_sqrt_len_tokens': False, 'pooling_mode_weightedmean_tokens': False, 'pooling_mode_lasttoken': False, 'include_prompt': True})
    (2): Normalize()
  ), 'temperature': 0.05}
  ```
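
Several of the larger sources are capped at round sizes (22,000 here and for msmarco_pairs, 20,000 for the trivia and gooaq sets), which suggests fixed-size subsampling of the full corpora. A sketch under that assumption; the shuffle seed is invented for illustration:

```python
from datasets import load_dataset

nq = load_dataset(
    "sentence-transformers/natural-questions",
    split="train",
    revision="f9e894e1081e206e577b4eaa9ee6de2b06ae6f17",
)

# Assumption: a shuffled 22,000-row subset; seed 42 is a placeholder.
nq_pairs = (
    nq.shuffle(seed=42)
    .select(range(22_000))
    .rename_columns({"query": "sentence1", "answer": "sentence2"})
)
```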

#### trivia_pairs

* Dataset: [trivia_pairs](https://huggingface.co/datasets/sentence-transformers/trivia-qa) at [a7c36e3](https://huggingface.co/datasets/sentence-transformers/trivia-qa/tree/a7c36e3c8c8c01526bc094d79bf80d4c848b0ad0)
* Size: 20,000 training samples
* Columns: <code>sentence1</code> and <code>sentence2</code>
* Approximate statistics based on the first 1000 samples:
  |         | sentence1                                                                         | sentence2                                                                            |
  |:--------|:----------------------------------------------------------------------------------|:-------------------------------------------------------------------------------------|
  | type    | string                                                                            | string                                                                               |
  | details | <ul><li>min: 8 tokens</li><li>mean: 18.89 tokens</li><li>max: 60 tokens</li></ul> | <ul><li>min: 15 tokens</li><li>mean: 456.48 tokens</li><li>max: 512 tokens</li></ul> |
* Samples:
  | sentence1                                                                                            | sentence2                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                |
  |:-----------------------------------------------------------------------------------------------------|:---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|
  | <code>With the symbol Wb what is the unit of magnetic flux?</code>                                   | <code>schoolphysics ::Welcome:: HOME > AGE 16 - 19 > ELECTRICITY AND MAGNETISM > ELECTROMAGNETISM > FLUX AND FLUX DENSITY Flux and flux density To understand the meaning of magnetic flux (Φ) and magnetic flux density (B) think first about an ordinary bar magnet. Around the magnet there is a magnetic field and this gives a �flow of magnetic energy� around the magnet. It is this flow of energy that we call magnetic flux (Φ). We think of magnetic flux as flowing from the north pole of a magnet round to its south pole as shown by the arrows on the lines in the diagram.  Looking at the diagram you should see that there is as much flux flowing �from the north pole� as there is �flowing into the south pole�. Magnetic flux is given the symbol Φ and is measured in units called Webers (Wb). However the amount of magnetic flux flowing through a given area will change from one point to another around the magnet and you can understand this by thinking about a loop of wire placed in the field at two different points (A and B). You can see that in position B there are a smaller number of magnetic field lines passing through the loop than there is when it is in position A. We call the amount of flux passing through a unit area at right angles to the magnetic field lines the flux density (B) at that point. Flux density is measured in Tesla (T) where 1 T = 1 Wbm-2 So: Flux (Φ) = Flux density (B) x area through which flux passes (A)    Φ = BA If we now use more than one loop of wire, in others words a coil of N turns as shown in position C the flux flowing through the N turns is simply N times that flowing through the single loop. The quantity NΦ is called the the flux linkage for the coil at that point. Therefore:</code>                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                     |
  | <code>The informal term for a gangster, especially belonging to the Mafia is?</code>                 | <code>goodfella - definition of goodfella in English | Oxford Dictionaries Definition of goodfella in English: goodfella A gangster, especially a member of a Mafia family. Example sentences ‘Saturday nights are reserved by the goodfellas for their wives, and Fridays, for the mistresses.’ ‘Bobby of course is the most sympathetic goodfella in the Soprano family.’ ‘If you notice a few goodfellas gawking every so often, don't reach for your pockets too quickly.’ ‘Compared to this, the goodfellas seem like sedentary sentimentalists, trapped animals rooted in dying communities, doomed territories.’ Pronunciation Which of the following is correct? She took over the business She brought over the business Which of the following is correct? The bad weather took famine The bad weather brought famine Which of the following is correct? The matter is taken before a jury The matter is brought before a jury Which of the following is correct? She took a deep breath She brought a deep breath Which of the following is correct? These pills should take the pain away These pills should bring the pain away Which of the following is correct? The memory took a smile to my face The memory brought a smile to my face Which of the following is correct? Take the pastry and roll it out Bring the pastry and roll it out Which of the following is correct? He took her coat away from her He brought her coat away from her Which of the following is correct? The crisis could bring down the regime The crisis could take down the regime Which of the following is correct? Will you bring me back some scent from Paris? Will you take me back some scent from Paris? You scored /10 practise again? Retry</code>                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                               |
  | <code>What did the band S Club 7 change their name to when Paul Cattermole left in June 2002?</code> | <code>BBC News | MUSIC | Paul leaves S Club 7 Thursday, 28 March, 2002, 11:53 GMT Paul leaves S Club 7 Paul Cattermole, 25, is the group's oldest member S Club 7 star Paul Cattermole, who has announced he is quitting the band, is to join a heavy metal rock group. The band will continue as a six-piece under the name S Club, and Cattermole will remain with them to work on their fourth TV series. He will also perform at the Queen's Golden Jubilee concert at Buckingham Palace in June. They were founded and managed by Spice Girls svengali Simon Fuller But Cattermole is to switch styles by joining Skua, a group of old school friends playing nu-metal music. He told the Sun newspaper: "I want a change musically." No new release is likely for some time - as Skua do not have a record deal. Cattermole's departure came as his former bandmates re-signed their deal with both their management company and record label, which will make each of the members a millionaire. An S Club spokeswoman described the split as amicable. 'Rumour' Aged 25, Cattermole is the oldest member of the group, and is currently dating S Club's Hannah Spearitt. S Club's Rachel will be part of the newly named S Club In January, the band denied claims they were set to split, issuing a statement that there was "no truth in the rumour". They released their first single, Bring It All Back, in June 1999, and it went straight to number one. In 2000 they started work on their second TV series, LA7, which saw them move to Los Angeles. The series went on to become a big hit on American TV. The group won best British newcomer at the Brit Awards in March 2000 and hit the top spot in December with Never Had A Dream Come True, which saw profits going to the BBC's Children In Need charity. 'Stupid' They had two more number ones in 2001 with Don't Stop Movin' and their second Children In Need song, Have You Ever. The group also hit the headlines in March when three of their members were arrested for possessing cannabis. Bradley McIntosh, Paul Cattermole and Jon Lee were found carrying the drug in London's West End. They later apologised for the incident to their fans, saying they had been "stupid". The four other members of the band are Hannah Spearritt, Jo O'Meara, Rachel Stevens and Tina Barrett. The group are the brainchild of former Spice Girls manager Simon Fuller.</code> |
* Loss: [<code>CachedGISTEmbedLoss</code>](https://sbert.net/docs/package_reference/sentence_transformer/losses.html#cachedgistembedloss) with these parameters:
  ```json
  {'guide': SentenceTransformer(
    (0): Transformer({'max_seq_length': 512, 'do_lower_case': False}) with Transformer model: BertModel 
    (1): Pooling({'word_embedding_dimension': 768, 'pooling_mode_cls_token': True, 'pooling_mode_mean_tokens': False, 'pooling_mode_max_tokens': False, 'pooling_mode_mean_sqrt_len_tokens': False, 'pooling_mode_weightedmean_tokens': False, 'pooling_mode_lasttoken': False, 'include_prompt': True})
    (2): Normalize()
  ), 'temperature': 0.05}
  ```

#### gooaq_pairs

* Dataset: [gooaq_pairs](https://huggingface.co/datasets/sentence-transformers/gooaq) at [b089f72](https://huggingface.co/datasets/sentence-transformers/gooaq/tree/b089f728748a068b7bc5234e5bcf5b25e3c8279c)
* Size: 20,000 training samples
* Columns: <code>sentence1</code> and <code>sentence2</code>
* Approximate statistics based on the first 1000 samples:
  |         | sentence1                                                                         | sentence2                                                                           |
  |:--------|:----------------------------------------------------------------------------------|:------------------------------------------------------------------------------------|
  | type    | string                                                                            | string                                                                              |
  | details | <ul><li>min: 8 tokens</li><li>mean: 11.52 tokens</li><li>max: 19 tokens</li></ul> | <ul><li>min: 16 tokens</li><li>mean: 57.68 tokens</li><li>max: 121 tokens</li></ul> |
* Samples:
  | sentence1                                                          | sentence2                                                                                                                                                                                                                                                                                                    |
  |:-------------------------------------------------------------------|:-------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|
  | <code>gyan is called in english?</code>                            | <code>Gyan (Sanskrit), a Sanskrit word that roughly translates to 'knowledge' in English.</code>                                                                                                                                                                                                             |
  | <code>are mud baths good for dogs?</code>                          | <code>Mud has many benefits for your dog. It can soothe irritations by removing dead irritated skin. It can soothe hot spots. The mud applied to your dogs coat during the bath can help moisturize the skin and remove dandruff.</code>                                                                     |
  | <code>how many calories do you burn doing interval running?</code> | <code>Cost in Calories exerciser who performs sprint intervals at a speed of 12 mph for 20 minutes expends roughly 608 calories. To demonstrate how body weight affects calorie expenditure, a 250-lb. person burns approximately 845 calories using the identical sprint speed and workout duration.</code> |
* Loss: [<code>CachedGISTEmbedLoss</code>](https://sbert.net/docs/package_reference/sentence_transformer/losses.html#cachedgistembedloss) with these parameters:
  ```json
  {'guide': SentenceTransformer(
    (0): Transformer({'max_seq_length': 512, 'do_lower_case': False}) with Transformer model: BertModel 
    (1): Pooling({'word_embedding_dimension': 768, 'pooling_mode_cls_token': True, 'pooling_mode_mean_tokens': False, 'pooling_mode_max_tokens': False, 'pooling_mode_mean_sqrt_len_tokens': False, 'pooling_mode_weightedmean_tokens': False, 'pooling_mode_lasttoken': False, 'include_prompt': True})
    (2): Normalize()
  ), 'temperature': 0.05}
  ```

#### paws-pos

* Dataset: [paws-pos](https://huggingface.co/datasets/google-research-datasets/paws) at [161ece9](https://huggingface.co/datasets/google-research-datasets/paws/tree/161ece9501cf0a11f3e48bd356eaa82de46d6a09)
* Size: 21,829 training samples
* Columns: <code>sentence1</code> and <code>sentence2</code>
* Approximate statistics based on the first 1000 samples:
  |         | sentence1                                                                         | sentence2                                                                         |
  |:--------|:----------------------------------------------------------------------------------|:----------------------------------------------------------------------------------|
  | type    | string                                                                            | string                                                                            |
  | details | <ul><li>min: 9 tokens</li><li>mean: 25.38 tokens</li><li>max: 51 tokens</li></ul> | <ul><li>min: 9 tokens</li><li>mean: 25.41 tokens</li><li>max: 50 tokens</li></ul> |
* Samples:
  | sentence1                                                                                                                                                          | sentence2                                                                                                                                                            |
  |:-------------------------------------------------------------------------------------------------------------------------------------------------------------------|:---------------------------------------------------------------------------------------------------------------------------------------------------------------------|
  | <code>After some protests from the girl 's father , the man of Douji and Hime was killed before Suzu and her father 's corpse were consumed by the Orochi .</code> | <code>After some protesting from the girl 's father , the man was killed by Douji and Hime before Suzu and her father 's corpse were consumed by the Orochi .</code> |
  | <code>162 . Fighter Escadrille was a unit of the Łódź Army at the start of the Second World War . The unit was attached to the Polish Air Force .</code>           | <code>At the beginning of the Second World War , the Fighter Escadrille was a unit of the Łódź army , which was attached to the Polish Air Force .</code>            |
  | <code>The first music video for the album filmed and edited for the song 'Trust You ' was made by Pierre Bouvier-Patron of Friends Studio , London .</code>        | <code>The first music video for the album , filmed and edited for the song 'Trust You ' , was made by Pierre Bouvier-Patron of Friends Studio , London .</code>      |
* Loss: [<code>CachedGISTEmbedLoss</code>](https://sbert.net/docs/package_reference/sentence_transformer/losses.html#cachedgistembedloss) with these parameters:
  ```json
  {'guide': SentenceTransformer(
    (0): Transformer({'max_seq_length': 512, 'do_lower_case': False}) with Transformer model: BertModel 
    (1): Pooling({'word_embedding_dimension': 768, 'pooling_mode_cls_token': True, 'pooling_mode_mean_tokens': False, 'pooling_mode_max_tokens': False, 'pooling_mode_mean_sqrt_len_tokens': False, 'pooling_mode_weightedmean_tokens': False, 'pooling_mode_lasttoken': False, 'include_prompt': True})
    (2): Normalize()
  ), 'temperature': 0.05}
  ```
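
With the individual pair sets prepared, multi-dataset training in sentence-transformers takes a dictionary of datasets keyed by the names used in this section, and a single loss instance can be shared across all of them, matching the identical parameter blocks printed above. This is a minimal sketch rather than the exact training recipe; the checkpoints are placeholders and the two dataset variables reuse the earlier sketches.

```python
from sentence_transformers import SentenceTransformer, SentenceTransformerTrainer
from sentence_transformers.losses import CachedGISTEmbedLoss

model = SentenceTransformer("bert-base-uncased")      # placeholder base model
guide = SentenceTransformer("BAAI/bge-base-en-v1.5")  # placeholder guide

train_datasets = {
    "sciq_pairs": pairs,    # from the sciq sketch above
    "nq_pairs": nq_pairs,   # from the subsampling sketch above
    # ... one entry per training dataset listed in this section
}

# One CachedGISTEmbedLoss instance shared across every dataset.
loss = CachedGISTEmbedLoss(model=model, guide=guide, temperature=0.05)

trainer = SentenceTransformerTrainer(
    model=model,
    train_dataset=train_datasets,
    loss=loss,
)
trainer.train()
```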

### Evaluation Datasets

#### vitaminc-pairs

* Dataset: [vitaminc-pairs](https://huggingface.co/datasets/tals/vitaminc) at [be6febb](https://huggingface.co/datasets/tals/vitaminc/tree/be6febb761b0b2807687e61e0b5282e459df2fa0)
* Size: 108 evaluation samples
* Columns: <code>claim</code> and <code>evidence</code>
* Approximate statistics based on the first 1000 samples (here, all 108):
  |         | claim                                                                             | evidence                                                                           |
  |:--------|:----------------------------------------------------------------------------------|:-----------------------------------------------------------------------------------|
  | type    | string                                                                            | string                                                                             |
  | details | <ul><li>min: 9 tokens</li><li>mean: 21.36 tokens</li><li>max: 41 tokens</li></ul> | <ul><li>min: 11 tokens</li><li>mean: 36.11 tokens</li><li>max: 79 tokens</li></ul> |
* Samples:
  | claim                                                                               | evidence                                                                                                                                                                                                                                                                                                                                               |
  |:------------------------------------------------------------------------------------|:-------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|
  | <code>Dragon Con had over 5000 guests .</code>                                      | <code>Among the more than 6000 guests and musical performers at the 2009 convention were such notables as Patrick Stewart , William Shatner , Leonard Nimoy , Terry Gilliam , Bruce Boxleitner , James Marsters , and Mary McDonnell .</code>                                                                                                          |
  | <code>COVID-19 has reached more than 185 countries .</code>                         | <code>As of , more than cases of COVID-19 have been reported in more than 190 countries and 200 territories , resulting in more than deaths .</code>                                                                                                                                                                                                   |
  | <code>In March , Italy had 3.6x times more cases of coronavirus than China .</code> | <code>As of 12 March , among nations with at least one million citizens , Italy has the world 's highest per capita rate of positive coronavirus cases at 206.1 cases per million people ( 3.6x times the rate of China ) and is the country with the second-highest number of positive cases as well as of deaths in the world , after China .</code> |
* Loss: [<code>CachedGISTEmbedLoss</code>](https://sbert.net/docs/package_reference/sentence_transformer/losses.html#cachedgistembedloss) with these parameters:
  ```json
  {'guide': SentenceTransformer(
    (0): Transformer({'max_seq_length': 512, 'do_lower_case': False}) with Transformer model: BertModel 
    (1): Pooling({'word_embedding_dimension': 768, 'pooling_mode_cls_token': True, 'pooling_mode_mean_tokens': False, 'pooling_mode_max_tokens': False, 'pooling_mode_mean_sqrt_len_tokens': False, 'pooling_mode_weightedmean_tokens': False, 'pooling_mode_lasttoken': False, 'include_prompt': True})
    (2): Normalize()
  ), 'temperature': 0.05}
  ```

#### negation-triplets

* Dataset: [negation-triplets](https://huggingface.co/datasets/jinaai/negation-dataset-v2) (a quick smoke-test sketch follows this section)
* Size: 64 evaluation samples
* Columns: <code>anchor</code>, <code>entailment</code>, and <code>negative</code>
* Approximate statistics based on the first 1000 samples (here, all 64):
  |         | anchor                                                                             | entailment                                                                         | negative                                                                           |
  |:--------|:-----------------------------------------------------------------------------------|:-----------------------------------------------------------------------------------|:-----------------------------------------------------------------------------------|
  | type    | string                                                                             | string                                                                             | string                                                                             |
  | details | <ul><li>min: 10 tokens</li><li>mean: 13.88 tokens</li><li>max: 18 tokens</li></ul> | <ul><li>min: 10 tokens</li><li>mean: 13.31 tokens</li><li>max: 21 tokens</li></ul> | <ul><li>min: 10 tokens</li><li>mean: 13.64 tokens</li><li>max: 22 tokens</li></ul> |
* Samples:
  | anchor                                                                                           | entailment                                                                                   | negative                                                                                      |
  |:-------------------------------------------------------------------------------------------------|:---------------------------------------------------------------------------------------------|:----------------------------------------------------------------------------------------------|
  | <code>1 military jet fighter flying in formation alongside a 1 military propeller pilot. </code> | <code>The two planes are different in design, but flying in a similar flight pattern.</code> | <code>The two planes are identical in design, but flying in different flight patterns.</code> |
  | <code>A random plane in the sky flying alone</code>                                              | <code>An airplane flying high in the blue sky. </code>                                       | <code>A helicopter flying low in the cloudy sky. </code>                                      |
  | <code>A picture of a white gas range with figurines above.</code>                                | <code>a white stove turned off with a digital clock</code>                                   | <code>a black stove turned on with a digital clock</code>                                     |
* Loss: [<code>CachedGISTEmbedLoss</code>](https://sbert.net/docs/package_reference/sentence_transformer/losses.html#cachedgistembedloss) with these parameters:
  ```json
  {'guide': SentenceTransformer(
    (0): Transformer({'max_seq_length': 512, 'do_lower_case': False}) with Transformer model: BertModel 
    (1): Pooling({'word_embedding_dimension': 768, 'pooling_mode_cls_token': True, 'pooling_mode_mean_tokens': False, 'pooling_mode_max_tokens': False, 'pooling_mode_mean_sqrt_len_tokens': False, 'pooling_mode_weightedmean_tokens': False, 'pooling_mode_lasttoken': False, 'include_prompt': True})
    (2): Normalize()
  ), 'temperature': 0.05}
  ```
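
The negation triplets make a convenient smoke test: a trained model should place the anchor closer to the entailment than to the negated sentence. A sketch using one of the sample triplets above, with a placeholder checkpoint:

```python
from sentence_transformers import SentenceTransformer

model = SentenceTransformer("bert-base-uncased")  # placeholder checkpoint

anchor = "A picture of a white gas range with figurines above."
entailment = "a white stove turned off with a digital clock"
negative = "a black stove turned on with a digital clock"

emb = model.encode([anchor, entailment, negative])
# Cosine similarity of the anchor against entailment and negative;
# a well-trained model should score the entailment higher.
sims = model.similarity(emb[0:1], emb[1:])
print(sims)
```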

#### scitail-pairs-pos

* Dataset: [scitail-pairs-pos](https://huggingface.co/datasets/allenai/scitail) at [0cc4353](https://huggingface.co/datasets/allenai/scitail/tree/0cc4353235b289165dfde1c7c5d1be983f99ce44)
* Size: 54 evaluation samples
* Columns: <code>sentence1</code> and <code>sentence2</code>
* Approximate statistics based on the first 1000 samples (here, all 54):
  |         | sentence1                                                                         | sentence2                                                                          |
  |:--------|:----------------------------------------------------------------------------------|:-----------------------------------------------------------------------------------|
  | type    | string                                                                            | string                                                                             |
  | details | <ul><li>min: 9 tokens</li><li>mean: 20.81 tokens</li><li>max: 45 tokens</li></ul> | <ul><li>min: 10 tokens</li><li>mean: 15.48 tokens</li><li>max: 23 tokens</li></ul> |
* Samples:
  | sentence1                                                                                                                                                                                     | sentence2                                                                              |
  |:----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|:---------------------------------------------------------------------------------------|
  | <code>humans normally have 23 pairs of chromosomes.</code>                                                                                                                                    | <code>Humans typically have 23 pairs pairs of chromosomes.</code>                      |
  | <code>A solution is a homogenous mixture of two or more substances that exist in a single phase.</code>                                                                                       | <code>Solution is the term for a homogeneous mixture of two or more substances.</code> |
  | <code>Upwelling The physical process in near-shore ocean systems of rising of nutrients and colder bottom waters to the surface because of constant wind patterns along the shoreline.</code> | <code>Upwelling is the term for when deep ocean water rises to the surface.</code>     |
* Loss: [<code>CachedGISTEmbedLoss</code>](https://sbert.net/docs/package_reference/sentence_transformer/losses.html#cachedgistembedloss) with these parameters:
  ```json
  {'guide': SentenceTransformer(
    (0): Transformer({'max_seq_length': 512, 'do_lower_case': False}) with Transformer model: BertModel 
    (1): Pooling({'word_embedding_dimension': 768, 'pooling_mode_cls_token': True, 'pooling_mode_mean_tokens': False, 'pooling_mode_max_tokens': False, 'pooling_mode_mean_sqrt_len_tokens': False, 'pooling_mode_weightedmean_tokens': False, 'pooling_mode_lasttoken': False, 'include_prompt': True})
    (2): Normalize()
  ), 'temperature': 0.05}
  ```

#### xsum-pairs

* Dataset: xsum-pairs
* Size: 128 evaluation samples
* Columns: <code>document</code> and <code>summary</code>
* Approximate statistics based on the first 1000 samples (here, all 128):
  |         | document                                                                             | summary                                                                            |
  |:--------|:-------------------------------------------------------------------------------------|:-----------------------------------------------------------------------------------|
  | type    | string                                                                               | string                                                                             |
  | details | <ul><li>min: 74 tokens</li><li>mean: 242.33 tokens</li><li>max: 374 tokens</li></ul> | <ul><li>min: 12 tokens</li><li>mean: 25.18 tokens</li><li>max: 38 tokens</li></ul> |
* Samples:
  | document                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                       | summary                                                                                                                                                            |
  |:-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|:-------------------------------------------------------------------------------------------------------------------------------------------------------------------|
  | <code>The region is already struggling to cope with a huge influx of migrants arriving from Tunisia.<br>Since January, at least 15,000 migrants have arrived, many of them landing on the tiny island of Lampedusa which is struggling to cope.<br>Thousands of people are living in basic camps on the island, leading to health concerns and rising local tensions.<br>"Until now the only migrants to arrive in Lampedusa were Tunisians," said Laura Boldrini a spokeswoman for the UN's refugee agency.<br>"This is the first boat coming from Libya with people fleeing the military escalation, the vendettas and the retaliation attacks," she said.<br>Overnight on Saturday, a boat carrying some 300 migrants was escorted by the Italian coastguard to Linosa,  an even smaller island some 50 km (35 miles) north of Lampedusa.<br>The passengers were mostly Somalis, Eritreans and Ethiopians and included a woman who had just given birth - she and the baby were flown to Lampedusa for medical care.<br>Several other boats from Libya, each carrying hundreds more migrants, are expected to reach Italy within hours.<br>Officials on Lampedusa, which is less than 160km from the Tunisian coast, have moved thousands of migrants to reception centres on the mainland, but some 5,000 remain.<br>The island's mayor has said he is desperate for help to relieve pressure on the island's very limited resources. Local people have said they are afraid of an outbreak of disease in the camps.<br>The Italian government has appealed to the international community for help.</code>                                                                                                                                | <code>Boatloads of migrants fleeing fighting in Libya are beginning to arrive in southern Italy, say officials.</code>                                             |
  | <code>The Belgium midfielder was one of a handful of players Mourinho had deemed to have underperformed this season.<br>Mourinho has also been under scrutiny, with the champions 15th in the league, 14 points behind leaders Leicester.<br>"I don't have a problem with him. We hope we can win a lot of trophies together," said Hazard, 24.<br>"Maybe not this season because it will be difficult, but next season and on."<br>The Blues moved above Norwich into 15th with a 1-0 win over the Canaries on Saturday - it was only their fourth victory in 13 league games this season.<br>Hazard, who was recently linked with a move to Real Madrid, has yet to score for the Blues in 18 appearances in all competitions in 2015-16.<br>He managed 20 last season en route to winning the Professional Footballers' Association and Football Writers' Association player of the year awards.<br>On his own form, he said: "I didn't start the season well. I tried to find out why, but I don't know.<br>"Sometimes you don't know. You have to keep going. I gave everything in training, on the pitch when I played.<br>"I hope I can get a lot of form and try to help the team win games."</code>                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                   | <code>Eden Hazard has denied having a strained relationship with Chelsea boss Jose Mourinho and suggested he wants to stay at the Premier League champions.</code> |
  | <code>Amadou Gallo Fall, the NBA's vice-president for Africa, told the BBC the centre would train boys and girls aged between 16 and 18.<br>He said the centre would be part of its global network of elite training academies.<br>Several Africans have played for top teams in the NBA league.<br>Mr Fall, who is originally from Senegal, said the pan-African academy would use its network to scout for players from around the continent.<br>He said the players would be given access to facilities and resources available to elite players including nutritionists, personal coaches and physiotherapists.<br>The centre will be in Thies, 60km (40 miles) east of the capital, Dakar.<br>Senegal's national teams - men and women - have traditionally been among the strongest in Africa.<br>Mr Fall said those who don't make it to the NBA would have other avenues, such as "other great leagues around the world, including the NBA development league or in US universities".<br>He said there were 14 African-born players on the NBA opening roster this year, including Senegal's Gorgui Dieng and Cameroon's Pascal Siakam.<br>He added the NBA had a long association with the continent, citing legendary players such as Hakeem Olajuwon, Manute Bol and Dikembe Mutombo.<br>"That generation has paved the way and they've inspired and ushered a significant number of other young players over the years, a lot of them from Senegal," he said.<br>The NBA launched three academy centres in China in October, one in India last month and is planning to open another global centre based in Australia.<br>The NBA held its first game in Africa in 1 August 2015 in the South African city of Johannesburg.</code> | <code>The US National Basketball Association (NBA) has announced it will open its first African training academy in Senegal early next year.</code>                |
* Loss: [<code>CachedGISTEmbedLoss</code>](https://sbert.net/docs/package_reference/sentence_transformer/losses.html#cachedgistembedloss) with these parameters:
  ```json
  {'guide': SentenceTransformer(
    (0): Transformer({'max_seq_length': 512, 'do_lower_case': False}) with Transformer model: BertModel 
    (1): Pooling({'word_embedding_dimension': 768, 'pooling_mode_cls_token': True, 'pooling_mode_mean_tokens': False, 'pooling_mode_max_tokens': False, 'pooling_mode_mean_sqrt_len_tokens': False, 'pooling_mode_weightedmean_tokens': False, 'pooling_mode_lasttoken': False, 'include_prompt': True})
    (2): Normalize()
  ), 'temperature': 0.05}
  ```
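
The guide model recorded above is a CLS-pooled BERT encoder that `CachedGISTEmbedLoss` uses to filter misleading in-batch negatives, while the cached (GradCache-style) formulation keeps the effective batch size large without exhausting GPU memory. A minimal sketch of how this loss might be constructed with `sentence-transformers` follows; both checkpoint names are placeholders, since the card records only the guide's architecture, not its identity.

```python
# Minimal sketch of the loss configuration above; checkpoint names are
# placeholders, as the card records only the guide's architecture
# (CLS-pooled BERT + Normalize), not which checkpoint was used.
from sentence_transformers import SentenceTransformer
from sentence_transformers.losses import CachedGISTEmbedLoss

model = SentenceTransformer("bert-base-uncased")      # model being trained (assumption)
guide = SentenceTransformer("BAAI/bge-base-en-v1.5")  # guide encoder (assumption)

# The guide scores in-batch pairs so that likely false negatives are masked
# out of the contrastive objective; caching allows large batches in bounded memory.
loss = CachedGISTEmbedLoss(model=model, guide=guide, temperature=0.05)
```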

#### sciq_pairs

* Dataset: [sciq_pairs](https://huggingface.co/datasets/allenai/sciq) at [2c94ad3](https://huggingface.co/datasets/allenai/sciq/tree/2c94ad3e1aafab77146f384e23536f97a4849815)
* Size: 128 evaluation samples
* Columns: <code>sentence1</code> and <code>sentence2</code>
* Approximate statistics based on the first 1000 samples:
  |         | sentence1                                                                         | sentence2                                                                          |
  |:--------|:----------------------------------------------------------------------------------|:-----------------------------------------------------------------------------------|
  | type    | string                                                                            | string                                                                             |
  | details | <ul><li>min: 8 tokens</li><li>mean: 16.24 tokens</li><li>max: 37 tokens</li></ul> | <ul><li>min: 2 tokens</li><li>mean: 71.48 tokens</li><li>max: 375 tokens</li></ul> |
* Samples:
  | sentence1                                                                                                                                      | sentence2                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                         |
  |:-----------------------------------------------------------------------------------------------------------------------------------------------|:----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|
  | <code>Water molds mostly live in water or moist?</code>                                                                                        | <code>Define physical change, and give examples of physical change.</code>                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                        |
  | <code>By allowing blood levels of a hormone to be regulated within a narrow range, feedback loops contribute to maintaining what state?</code> | <code>Role of Feedback Loops The contribution of feedback loops to homeostasis will only be briefly reviewed here. Positive feedback loops are characterized by the release of additional hormone in response to an original hormone release. The release of oxytocin during childbirth is a positive feedback loop. The initial release of oxytocin begins to signal the uterine muscles to contract, which pushes the fetus toward the cervix, causing it to stretch. This, in turn, signals the pituitary gland to release more oxytocin, causing labor contractions to intensify. The release of oxytocin decreases after the birth of the child. The more common method of hormone regulation is the negative feedback loop. Negative feedback is characterized by the inhibition of further secretion of a hormone in response to adequate levels of that hormone. This allows blood levels of the hormone to be regulated within a narrow range. An example of a negative feedback loop is the release of glucocorticoid hormones from the adrenal glands, as directed by the hypothalamus and pituitary gland. As glucocorticoid concentrations in the blood rise, the hypothalamus and pituitary gland reduce their signaling to the adrenal glands to prevent additional glucocorticoid secretion (Figure 17.6).</code> |
  | <code>What changes the chemical composition of a substance and can only occur through a chemical reaction?</code>                              | <code>Pure substances, such as compounds, can be separated through chemical changes. Chemical changes change the chemical composition of a substance and can only occur through a chemical reaction.</code>                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                       |
* Loss: [<code>CachedGISTEmbedLoss</code>](https://sbert.net/docs/package_reference/sentence_transformer/losses.html#cachedgistembedloss) with these parameters:
  ```json
  {'guide': SentenceTransformer(
    (0): Transformer({'max_seq_length': 512, 'do_lower_case': False}) with Transformer model: BertModel 
    (1): Pooling({'word_embedding_dimension': 768, 'pooling_mode_cls_token': True, 'pooling_mode_mean_tokens': False, 'pooling_mode_max_tokens': False, 'pooling_mode_mean_sqrt_len_tokens': False, 'pooling_mode_weightedmean_tokens': False, 'pooling_mode_lasttoken': False, 'include_prompt': True})
    (2): Normalize()
  ), 'temperature': 0.05}
  ```
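
For reference, a two-column pair dataset in this format can be produced from the linked source with a few lines of `datasets` code. This is a hedged sketch, not the card's actual preprocessing: the question-to-`sentence1` and support-to-`sentence2` mapping is an assumption.

```python
# Hedged sketch: build a (sentence1, sentence2) pair dataset from allenai/sciq.
# The preprocessing behind this card is not documented; the column mapping
# below is an assumption for illustration only.
from datasets import load_dataset

sciq = load_dataset("allenai/sciq", split="validation")
pairs = sciq.map(
    lambda row: {"sentence1": row["question"], "sentence2": row["support"]},
    remove_columns=sciq.column_names,
)
print(pairs[0])  # {'sentence1': ..., 'sentence2': ...}
```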

#### qasc_pairs

* Dataset: [qasc_pairs](https://huggingface.co/datasets/allenai/qasc) at [a34ba20](https://huggingface.co/datasets/allenai/qasc/tree/a34ba204eb9a33b919c10cc08f4f1c8dae5ec070)
* Size: 128 evaluation samples
* Columns: <code>sentence1</code> and <code>sentence2</code>
* Approximate statistics based on the first 1000 samples:
  |         | sentence1                                                                         | sentence2                                                                          |
  |:--------|:----------------------------------------------------------------------------------|:-----------------------------------------------------------------------------------|
  | type    | string                                                                            | string                                                                             |
  | details | <ul><li>min: 6 tokens</li><li>mean: 11.16 tokens</li><li>max: 22 tokens</li></ul> | <ul><li>min: 17 tokens</li><li>mean: 34.24 tokens</li><li>max: 55 tokens</li></ul> |
* Samples:
  | sentence1                                                                              | sentence2                                                                                                                                                                                                                |
  |:---------------------------------------------------------------------------------------|:-------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|
  | <code>what code proteins?</code>                                                       | <code>Chromosomes contain genes, which code for proteins.. Chromosomes are composed of DNA and proteins.. genes code proteins</code>                                                                                     |
  | <code>Furry animals grow thicker coats which has what impact on their survival?</code> | <code>staying warm has a positive impact on an animal 's survival. Furry animals grow thicker coats to keep warm in the winter.. Furry animals grow thicker coats which has a positive impact on their survival. </code> |
  | <code>Erosion can be caused by </code>                                                 | <code>heavy rains cause flooding. Flooding is problematic because it causes erosion problems.. heavy rains cause erosion</code>                                                                                          |
* Loss: [<code>CachedGISTEmbedLoss</code>](https://sbert.net/docs/package_reference/sentence_transformer/losses.html#cachedgistembedloss) with these parameters:
  ```json
  {'guide': SentenceTransformer(
    (0): Transformer({'max_seq_length': 512, 'do_lower_case': False}) with Transformer model: BertModel 
    (1): Pooling({'word_embedding_dimension': 768, 'pooling_mode_cls_token': True, 'pooling_mode_mean_tokens': False, 'pooling_mode_max_tokens': False, 'pooling_mode_mean_sqrt_len_tokens': False, 'pooling_mode_weightedmean_tokens': False, 'pooling_mode_lasttoken': False, 'include_prompt': True})
    (2): Normalize()
  ), 'temperature': 0.05}
  ```

#### openbookqa_pairs

* Dataset: openbookqa_pairs
* Size: 128 evaluation samples
* Columns: <code>question</code> and <code>fact</code>
* Approximate statistics based on the first 1000 samples:
  |         | question                                                                          | fact                                                                              |
  |:--------|:----------------------------------------------------------------------------------|:----------------------------------------------------------------------------------|
  | type    | string                                                                            | string                                                                            |
  | details | <ul><li>min: 3 tokens</li><li>mean: 13.98 tokens</li><li>max: 47 tokens</li></ul> | <ul><li>min: 4 tokens</li><li>mean: 11.78 tokens</li><li>max: 28 tokens</li></ul> |
* Samples:
  | question                                                               | fact                                                                         |
  |:-----------------------------------------------------------------------|:-----------------------------------------------------------------------------|
  | <code>The thermal production of a stove is generically used for</code> | <code>a stove generates heat for cooking usually</code>                      |
  | <code>What creates a valley?</code>                                    | <code>a valley is formed by a river flowing</code>                           |
  | <code>when it turns day and night on a planet, what cause this?</code> | <code>a planet rotating causes cycles of day and night on that planet</code> |
* Loss: [<code>CachedGISTEmbedLoss</code>](https://sbert.net/docs/package_reference/sentence_transformer/losses.html#cachedgistembedloss) with these parameters:
  ```json
  {'guide': SentenceTransformer(
    (0): Transformer({'max_seq_length': 512, 'do_lower_case': False}) with Transformer model: BertModel 
    (1): Pooling({'word_embedding_dimension': 768, 'pooling_mode_cls_token': True, 'pooling_mode_mean_tokens': False, 'pooling_mode_max_tokens': False, 'pooling_mode_mean_sqrt_len_tokens': False, 'pooling_mode_weightedmean_tokens': False, 'pooling_mode_lasttoken': False, 'include_prompt': True})
    (2): Normalize()
  ), 'temperature': 0.05}
  ```

#### msmarco_pairs

* Dataset: [msmarco_pairs](https://huggingface.co/datasets/sentence-transformers/msmarco-msmarco-distilbert-base-v3) at [28ff31e](https://huggingface.co/datasets/sentence-transformers/msmarco-msmarco-distilbert-base-v3/tree/28ff31e4c97cddd53d298497f766e653f1e666f9)
* Size: 128 evaluation samples
* Columns: <code>sentence1</code> and <code>sentence2</code>
* Approximate statistics based on the first 1000 samples:
  |         | sentence1                                                                        | sentence2                                                                           |
  |:--------|:---------------------------------------------------------------------------------|:------------------------------------------------------------------------------------|
  | type    | string                                                                           | string                                                                              |
  | details | <ul><li>min: 4 tokens</li><li>mean: 8.68 tokens</li><li>max: 32 tokens</li></ul> | <ul><li>min: 21 tokens</li><li>mean: 72.57 tokens</li><li>max: 159 tokens</li></ul> |
* Samples:
  | sentence1                                                                                                                                                            | sentence2                                                                                                                                                                                                                                                                                                                                |
  |:---------------------------------------------------------------------------------------------------------------------------------------------------------------------|:-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|
  | <code>what types of functions might you use for looking items up within your data? can you list any examples of where the formulas you chose might be useful?</code> | <code>Tip: Use MATCH instead of one of the LOOKUP functions when you need the position of an item in a range instead of the item itself. For example, you might use the MATCH function to provide a value for the row_num argument of the INDEX function.</code>                                                                         |
  | <code>ppt vehicle definition</code>                                                                                                                                  | <code>A policy purchased by vehicle owners to mitigate costs associated with getting into an auto accident. Instead of paying out of pocket for auto accidents, people pay annual premiums to an auto insurance company; the company then pays all or most of the costs associated with an auto accident or other vehicle damage.</code> |
  | <code>difference between integrated and dedicated graphics</code>                                                                                                    | <code>Key Difference: Dedicated and Integrated Graphics Cards are two types of graphics cards. The main difference between two is that the integrated graphics card comes built in to the computer. Whereas, the dedicated graphics card is an external attachment that must be connected to the motherboard.</code>                     |
* Loss: [<code>CachedGISTEmbedLoss</code>](https://sbert.net/docs/package_reference/sentence_transformer/losses.html#cachedgistembedloss) with these parameters:
  ```json
  {'guide': SentenceTransformer(
    (0): Transformer({'max_seq_length': 512, 'do_lower_case': False}) with Transformer model: BertModel 
    (1): Pooling({'word_embedding_dimension': 768, 'pooling_mode_cls_token': True, 'pooling_mode_mean_tokens': False, 'pooling_mode_max_tokens': False, 'pooling_mode_mean_sqrt_len_tokens': False, 'pooling_mode_weightedmean_tokens': False, 'pooling_mode_lasttoken': False, 'include_prompt': True})
    (2): Normalize()
  ), 'temperature': 0.05}
  ```

#### nq_pairs

* Dataset: [nq_pairs](https://huggingface.co/datasets/sentence-transformers/natural-questions) at [f9e894e](https://huggingface.co/datasets/sentence-transformers/natural-questions/tree/f9e894e1081e206e577b4eaa9ee6de2b06ae6f17)
* Size: 128 evaluation samples
* Columns: <code>sentence1</code> and <code>sentence2</code>
* Approximate statistics based on the first 1000 samples:
  |         | sentence1                                                                          | sentence2                                                                            |
  |:--------|:-----------------------------------------------------------------------------------|:-------------------------------------------------------------------------------------|
  | type    | string                                                                             | string                                                                               |
  | details | <ul><li>min: 10 tokens</li><li>mean: 11.65 tokens</li><li>max: 18 tokens</li></ul> | <ul><li>min: 23 tokens</li><li>mean: 133.51 tokens</li><li>max: 299 tokens</li></ul> |
* Samples:
  | sentence1                                                                 | sentence2                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                                       |
  |:--------------------------------------------------------------------------|:------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|
  | <code>when was everything that rises must converge written</code>         | <code>Everything That Rises Must Converge Everything That Rises Must Converge is a collection of short stories written by Flannery O'Connor during the final decade of her life. The collection's eponymous story derives its name from the work of Pierre Teilhard de Chardin.[1][2] The collection was published posthumously in 1965 and contains an introduction by Robert Fitzgerald. Of the volume's nine stories, seven had been printed in magazines or literary journals prior to being collected. "Judgment Day" is a dramatically reworked version of "The Geranium," which was one of O'Connor's earliest publications and appeared in her graduate thesis at the University of Iowa. "Parker's Back," the collection's only completely new story, was a last-minute addition.</code>                                                                                                                                                                                               |
  | <code>what are the creatures in the woods american horror story</code>    | <code>American Horror Story: Asylum Dr. Arden is a former Nazi whose experiments have produced "Raspers", mutated former patients, who lurk in the woods surrounding the institution, and who are fed the flesh of dead patients. Dr. Thredson is assigned to evaluate Kit, who is accused of being the infamous serial killer 'Bloody Face' and believes his wife Alma (Britne Oldford) was abducted by aliens. Thredson also tries to "reform" Lana, who was an ambitious journalist attempting to expose Briarcliff's mistreatments of patients. She was in a relationship with Wendy (Clea Duvall), who was blackmailed by Sister Jude into committing Winters, before being killed by Bloody Face. Thredson helps Lana escape from the asylum, but she learns that Thredson is actually Bloody Face, and is kept prisoner. He rapes her and tries to kill her, but she manages to escape, only to end up back at Briarcliff. She later learns she is pregnant with Thredson's baby.</code> |
  | <code>what does the pink panther movie have to do with the cartoon</code> | <code>The Pink Panther The first film in the series derives its name from the eponymous pink diamond that has an enormous size and value. The diamond is called the "Pink Panther" because the flaw at its centre, when viewed closely, is said to resemble a leaping pink panther. The phrase reappears in the title of the fourth film The Return of the Pink Panther, in which the theft of the diamond is again the centre of the plot. The phrase was used for all the subsequent films in the series, even when the jewel did not figure in the plot. It ultimately appeared in six of the eleven films.</code>                                                                                                                                                                                                                                                                                                                                                                           |
* Loss: [<code>CachedGISTEmbedLoss</code>](https://sbert.net/docs/package_reference/sentence_transformer/losses.html#cachedgistembedloss) with these parameters:
  ```json
  {'guide': SentenceTransformer(
    (0): Transformer({'max_seq_length': 512, 'do_lower_case': False}) with Transformer model: BertModel 
    (1): Pooling({'word_embedding_dimension': 768, 'pooling_mode_cls_token': True, 'pooling_mode_mean_tokens': False, 'pooling_mode_max_tokens': False, 'pooling_mode_mean_sqrt_len_tokens': False, 'pooling_mode_weightedmean_tokens': False, 'pooling_mode_lasttoken': False, 'include_prompt': True})
    (2): Normalize()
  ), 'temperature': 0.05}
  ```

#### trivia_pairs

* Dataset: [trivia_pairs](https://huggingface.co/datasets/sentence-transformers/trivia-qa) at [a7c36e3](https://huggingface.co/datasets/sentence-transformers/trivia-qa/tree/a7c36e3c8c8c01526bc094d79bf80d4c848b0ad0)
* Size: 128 evaluation samples
* Columns: <code>sentence1</code> and <code>sentence2</code>
* Approximate statistics based on the first 1000 samples:
  |         | sentence1                                                                         | sentence2                                                                            |
  |:--------|:----------------------------------------------------------------------------------|:-------------------------------------------------------------------------------------|
  | type    | string                                                                            | string                                                                               |
  | details | <ul><li>min: 9 tokens</li><li>mean: 19.75 tokens</li><li>max: 54 tokens</li></ul> | <ul><li>min: 58 tokens</li><li>mean: 452.12 tokens</li><li>max: 512 tokens</li></ul> |
* Samples:
  | sentence1                                                                                           | sentence2                                                                                                                                                                                                                                                              |
  |:----------------------------------------------------------------------------------------------------|:---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|
  | <code>In which country was Ursula Andrews born?</code>                                              | <code>Ursula Andress - Biography - IMDb Ursula Andress Biography Showing all 59 items Jump to: Overview  (3) \| Mini Bio  (1) \| Spouse  (1) \| Trade Mark  (4) \| Trivia  (34) \| Personal Quotes  (15) \| Salary  (1) Overview (3) 5' 5" (1.65 m) Mini Bio (1) The quintessential jet-set Euro starlet, Ursula Andress was born in the Swiss canton of Berne on March 19, 1936, one of six children in a strict German Protestant family. Although often seeming icily aloof, a restless streak early demonstrated itself in her personality, and she had an impetuous desire to explore the world outside Switzerland. (For instance, she was tracked down by Interpol for running away from boarding school at 17 years old.) The stunning young woman found work as an art model in Rome and did walk-ons in three quickie Italian features before coming to the Hollywood. At 19, she met fading matinée idol John Derek , who left his first wife and two children to marry Ursula in 1957, despite the fact that she only spoke a few words of English at the time, and persuaded the new bride to put her acting ambitions on hold for a few years. The year 1962 saw the virtually unknown Swiss beauty back on the set, playing a small role in the first movie version of Ian Fleming 's fanciful "James Bond" espionage novels, Dr. No (1962), opposite Sean Connery . Because her Swiss/German accent was so strong, Andress' entire performance had to be dubbed by a voiceover artist. Nevertheless, her striking beauty and smoldering screen presence made a strong impression on moviegoers, immediately establishing her as one of the most desired women in the world and as an ornament to put on-screen alongside some of the most bankable talent of the era, such as Elvis Presley in Fun in Acapulco (1963) and Dean Martin in 4 for Texas (1963). In 1965, she was one of several European starlets to co-star in What's New Pussycat (1965) -- a film that perhaps sums up mid-'60s pop culture better than any other -- written by Woody Allen , starring Allen and Peter Sellers , with music by Burt Bacharach , a title song performed by Tom Jones and much on-screen sexual romping. Andress appeared in many more racy-for-their time movies in both the United States and Europe from the mid-'60s to the late '70s, including The 10th Victim (1965), in which she wears a famously ballistic bra; The Blue Max (1966), where she is aptly cast as the sultry, insatiable wife of an aristocratic World War I German general; the James Bond satire Casino Royale (1967); the excellent crime caper Perfect Friday (1970); Red Sun (1971), as a foul-mouthed prostitute taken hostage by outlaws; The Sensuous Nurse (1975), as a bombshell nurse hired to titillate a doddering millionaire to death; the notorious The Mountain of the Cannibal God (1978), in which she is stripped and slathered in orange paint by a pair of nubiles; and The Fifth Musketeer (1979), in the role of King Louis XIV's alluring mistress. Unmarried since 1966 when she'd divorced Derek after falling in love with French actor Jean-Paul Belmondo on the Malaysian set of Up to His Ears (1965), Andress played the field for years, reportedly involved at various times with a host of others including (but by no means limited to) Ryan O'Neal , Marcello Mastroianni , Dennis Hopper and Fabio Testi . In 1979, she began what would be a long-term romance with Harry Hamlin , her handsome young co-star from Clash of the Titans (1981) (in which she was cast, predictably, as "Aphrodite"). While subsequently traveling in India, Andress' belly began to swell out of her clothing, and she felt very nauseous. What at first seemed a severe case of "Delhi Belly" turned out to be pregnancy, her first and only, at age 43. She and Hamlin named the child, who was born in 1980, Dimitri Hamlin . After the birth of her son, Andress scaled back her career, which now focused mostly on slight European films and occasional television roles, as she was raising Dimitri in Rome. Her relationship with Hamlin ended in 1983, and she last worked on a film in 2005. - IMDb Mini Biography By: Larry-115 Spouse (1) Seductive</code> |
  | <code>What chemical element, symbol Cr, is named due its colourful/colorful compound effect?</code> | <code>Etymologies of element names \| PlanetStar Wiki \| Fandom powered by Wikia Etymologies of element names Share Ad blocker interference detected! Wikia is a free-to-use site that makes money from advertising. We have a modified experience for viewers using ad blockers Wikia is not accessible if you’ve made further modifications. Remove the custom ad blocker rule(s) and the page will load as expected. This is the list of 173 elements discussing their etymologies, including official, proposed, predicted, and made-up names. A subatomic particle neutron , since this element only contains neutron(s) in its nucleus . 1 Hydrogen H From the Latin hydor genes, derived from the Ancient Greek ὕδωρ γείνομαι (hydor geinomai), meaning "to beget water," because water is the most common and important hydrogen compound . 2 From the Greek ἥλιος ( Helios ), meaning " Sun " and also " the god of Sun " in mythology . 3 Lithium Li From the Greek λιθος (lithos), meaning "stone," because this element was discovered from a mineral while other common alkali metals (sodium and potassium) were discovered from plant tissue. 4 Beryllos, denoting " beryl ," which contains beryllium. 5 لاعقشا (buraq), derived from the Persian "بورون" (burah), referring to " borax ." 6 Carbon C From the French charbone, which in turn came from the Latin carbo, meaning "charcoal." (In German and Dutch , kohlenstoff and koolstof, respectively, both literally meaning " coal -stuff"). 7 N From the Latin nitrum genes, derived from the Greek νιτρον γείνομαι (nitron geinomai), meaning "native-soda ( niter ) forming." It is sometimes known as Azotum, which means "Ashdod", the English name of that element, and its symbol is sometimes known as "Az". 8 Oxygen O From the Greek οξύς γείνομαι (oxys geinomai), meaning "acid to bring forth," as he believed it to be an essential component of acids. 9 Fluorspar , one of its compounds. Fluor is the Latin for "flowing." 10 From the Greek νέος (neos), meaning "new." 11 Sodium Na From the English soda , used in names for sodium compounds such as caustic soda , soda ash , and baking soda . The symbol Na is from the Modern Latin name Natrium, derived from the Greek νιτρον (nítron), meaning "natural soda," a kind of salt. 12 From the Ancient Greek Μαγνήσια ( Magnesia ) (district in Thessaly ), where this element was discovered. 13 From the Latin alum , meaning "bitter salt." 14 From the Latin silex or silicis, meaning " flint ," a kind of stone. 15 Phosphorus P From the Greek φωσ φόρος (phós phoros), meaning "light bearer," because "white phosphorus" emits a faint glow upon exposure to oxygen. Phosphorus was an ancient name for the " planet Venus " as Phosphorus ( Morning Star ). 16 Sulfur S Almost certainly from the Arabic صفرا (sufra), "yellow," the bright color of the naturally occurring form. The word passed into Sanskrit गन्धक (sulvere or sulvari), the Latin "sulpur," the English "sulphur" or "sulfur," and also was commonly referred as "brimstone" in English translations of the Bible . 17 Chlorine Cl From the Greek χλώρος (chlorós), meaning "yellowish green" or "greenish yellow," because of the color of the gas. 18 Means "inactive" in Greek (literally "lazy"). 19 Potassium K From the English potash , meaning "pot-ash" ( potassium compound prepared from an alkali extracted in a pot from the ash of burnt wood or tree leaves). The symbol K is from this elment's Latin name, Kalium, derived from the Arabic القلي (al qalīy), meaning "calcined ashes." 20 Calcium Ca From the Latin calx, meaning "lime." Calcium was known as early as the first century when the Romans prepared lime as calcium oxide . 21 From the Latin Scandia, meaning " Scandinavia ." 22 Titanium Ti From the Greek τιτάν (titan), meaning " Earth ." Also from Titans , the first sons of Gaia in Greek mythology. 23 From Vanadis , a goddess in Scandinavian mythology , because of its beautifully multicolored chemical compounds. 24 From the Greek chroma, meaning " color ," because there are many colorful chromium compounds . 25 From the Latin magnes, meaning "magnet," for Magnetite or its magne</code> |
  | <code>What religion is the Dalai Lama?</code>                                                       | <code>BBC - Religions - Buddhism: Dalai Lama Dalai Lama Last updated 2006-09-21 The institution of the Dalai Lama is a relatively recent one. There have been only 14 Dalai Lamas in the history of Buddhism. On this page The rôle of the Dalai Lama Potala Palace, the Dalai Lama's residence until 1959 The Dalai Lama is the head monk of Tibetan Buddhism and traditionally has been responsible for the governing of Tibet, until the Chinese government took control in 1959. Before 1959, his official residence was Potala Palace in Lhasa, the capital of Tibet. The Dalai Lama belongs to the Gelugpa tradition of Tibetan Buddhism, which is the largest and most influential tradition in Tibet. The institution of the Dalai Lama is a relatively recent one. There have been only 14 Dalai Lamas in the history of Buddhism, and the first and second Dalai Lamas were given the title posthumously. According to Buddhist belief, the current Dalai Lama is a reincarnation of a past lama who decided to be reborn again to continue his important work, instead of moving on from the wheel of life. A person who decides to be continually reborn is known as tulku. Buddhists believe that the first tulku in this reincarnation was Gedun Drub, who lived from 1391-1474 and the second was Gendun Gyatso. However, the name Dalai Lama, meaning Ocean of Wisdom, was not conferred until the third reincarnation in the form of Sonam Gyatso in 1578. The current Dalai Lama is Tenzin Gyatso. Tenzin Gyatso, 14th Dalai Lama, as a child © Choosing a Dalai Lama After the death of a Dalai Lama it has traditionally been the responsibility of the High Lamas of the Gelugpa Tradition and the Tibetan government to find his reincarnation. The High Lamas search for a boy who was born around the same time as the death of the Dalai Lama. It can take around two or three years to identify the Dalai Lama, and for the current, 14th Dalai Lama, it was four years before he was found. There are several ways in which the High Lamas might find out where the next reincarnation will be found. Dream One of the High Lamas may dream about some mark or location that will identify the boy. Smoke If the previous Dalai Lama was cremated, High Lamas will watch the direction of the smoke and search accordingly. Oracle Lake High Lamas go to a holy lake, called Lhamo Lhatso, in central Tibet and watch for a sign from the lake itself. This may be either a vision or some indication of the direction in which to search.The home and village of Tenzin Gyatso was identified in a vision from this lake. Once the High Lamas have located the home and the boy, they present a number of artefacts which they have brought with them in preparation, to the child. Amongst these artefacts are a number of items that belonged to the deceased Dalai Lama. If the boy chooses the items that belonged to the previous Dalai Lama, this is seen as a sign, in conjunction with all of the other indications, that the boy is a reincarnation. This procedure, however, as Tenzin Gyatso has said himself, is not set in stone; if two thirds of the Tibetan people wish to change the method of identifying the next reincarnation, this would be just as valid. The search for the Dalai Lama has usually been limited to Tibet, although the third tulku was born in Mongolia. However, as Tibet has been taken by the Chinese government, Tenzin Gyatso says that if he is reborn it will not be in a country run by the People's Republic of China, or any other country which is not free. In order to see this content you need to have both Javascript enabled and Flash installed. Visit BBC Webwise for full instructions Interestingly, Tenzin Gyatso has also expressed doubts over whether he will be reborn at all, suggesting the function of the Dalai Lama may be over. However, until Tibet is reunited with its spiritual leader, it seems likely that there will continue to be a Dalai Lama. Top Tenzin Gyatso, the 14th Dalai Lama Tenzin Gyatso is the fourteenth Dalai Lama of Tibetan Buddhism. He was born in 1935 and recognised as the reincarnation of Thubten Gyatso at a young age. Tenzin</code> |
* Loss: [<code>CachedGISTEmbedLoss</code>](https://sbert.net/docs/package_reference/sentence_transformer/losses.html#cachedgistembedloss) with these parameters:
  ```json
  {'guide': SentenceTransformer(
    (0): Transformer({'max_seq_length': 512, 'do_lower_case': False}) with Transformer model: BertModel 
    (1): Pooling({'word_embedding_dimension': 768, 'pooling_mode_cls_token': True, 'pooling_mode_mean_tokens': False, 'pooling_mode_max_tokens': False, 'pooling_mode_mean_sqrt_len_tokens': False, 'pooling_mode_weightedmean_tokens': False, 'pooling_mode_lasttoken': False, 'include_prompt': True})
    (2): Normalize()
  ), 'temperature': 0.05}
  ```

#### gooaq_pairs

* Dataset: [gooaq_pairs](https://huggingface.co/datasets/sentence-transformers/gooaq) at [b089f72](https://huggingface.co/datasets/sentence-transformers/gooaq/tree/b089f728748a068b7bc5234e5bcf5b25e3c8279c)
* Size: 128 evaluation samples
* Columns: <code>sentence1</code> and <code>sentence2</code>
* Approximate statistics based on the first 1000 samples:
  |         | sentence1                                                                         | sentence2                                                                           |
  |:--------|:----------------------------------------------------------------------------------|:------------------------------------------------------------------------------------|
  | type    | string                                                                            | string                                                                              |
  | details | <ul><li>min: 8 tokens</li><li>mean: 11.23 tokens</li><li>max: 17 tokens</li></ul> | <ul><li>min: 16 tokens</li><li>mean: 57.14 tokens</li><li>max: 107 tokens</li></ul> |
* Samples:
  | sentence1                                                 | sentence2                                                                                                                                                                                                                                                                 |
  |:----------------------------------------------------------|:--------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|
  | <code>who are the dc characters?</code>                   | <code>['Superman.', 'Batman. ... ', 'Flash. ... ', 'Green Lantern. ... ', 'Wonder Woman. ... ', 'Martian Manhunter. ... ', 'Aquaman. ... ', 'John Constantine. ... ']</code>                                                                                              |
  | <code>what restaurants are giving free food today?</code> | <code>['Burger King.', 'The Cheesecake Factory.', "Steak 'n Shake.", "Wendy's.", "TGI Friday's.", 'Panera.', "Moe's Southwest Grill."]</code>                                                                                                                             |
  | <code>who is paige on pretty little liars?</code>         | <code>McCullers is a character in Pretty Little Liars television series on ABC Family. She is portrayed by Lindsey Shaw. Paige is a talented swimmer and a pretty good fighter, as we see in This Is A Dark Ride. She is part of Rosewood High School's swim team.</code> |
* Loss: [<code>CachedGISTEmbedLoss</code>](https://sbert.net/docs/package_reference/sentence_transformer/losses.html#cachedgistembedloss) with these parameters:
  ```json
  {'guide': SentenceTransformer(
    (0): Transformer({'max_seq_length': 512, 'do_lower_case': False}) with Transformer model: BertModel 
    (1): Pooling({'word_embedding_dimension': 768, 'pooling_mode_cls_token': True, 'pooling_mode_mean_tokens': False, 'pooling_mode_max_tokens': False, 'pooling_mode_mean_sqrt_len_tokens': False, 'pooling_mode_weightedmean_tokens': False, 'pooling_mode_lasttoken': False, 'include_prompt': True})
    (2): Normalize()
  ), 'temperature': 0.05}
  ```

#### paws-pos

* Dataset: [paws-pos](https://huggingface.co/datasets/google-research-datasets/paws) at [161ece9](https://huggingface.co/datasets/google-research-datasets/paws/tree/161ece9501cf0a11f3e48bd356eaa82de46d6a09)
* Size: 128 evaluation samples
* Columns: <code>sentence1</code> and <code>sentence2</code>
* Approximate statistics based on the first 1000 samples (here, the full 128-sample split):
  |         | sentence1                                                                          | sentence2                                                                          |
  |:--------|:-----------------------------------------------------------------------------------|:-----------------------------------------------------------------------------------|
  | type    | string                                                                             | string                                                                             |
  | details | <ul><li>min: 10 tokens</li><li>mean: 25.72 tokens</li><li>max: 42 tokens</li></ul> | <ul><li>min: 10 tokens</li><li>mean: 25.55 tokens</li><li>max: 41 tokens</li></ul> |
* Samples:
  | sentence1                                                                                                                                                      | sentence2                                                                                                                                                      |
  |:---------------------------------------------------------------------------------------------------------------------------------------------------------------|:---------------------------------------------------------------------------------------------------------------------------------------------------------------|
  | <code>They were there to enjoy us and they were there to pray for us .</code>                                                                                  | <code>They were there for us to enjoy and they were there for us to pray .</code>                                                                              |
  | <code>After the end of the war in June 1902 , Higgins left Southampton in the `` SSBavarian '' in August , returning to Cape Town the following month .</code> | <code>In August , after the end of the war in June 1902 , Higgins Southampton left the `` SSBavarian '' and returned to Cape Town the following month .</code> |
  | <code>From the merger of the Four Rivers Council and the Audubon Council , the Shawnee Trails Council was born .</code>                                        | <code>Shawnee Trails Council was formed from the merger of the Four Rivers Council and the Audubon Council .</code>                                            |
* Loss: [<code>CachedGISTEmbedLoss</code>](https://sbert.net/docs/package_reference/sentence_transformer/losses.html#cachedgistembedloss) with these parameters:
  ```json
  {'guide': SentenceTransformer(
    (0): Transformer({'max_seq_length': 512, 'do_lower_case': False}) with Transformer model: BertModel 
    (1): Pooling({'word_embedding_dimension': 768, 'pooling_mode_cls_token': True, 'pooling_mode_mean_tokens': False, 'pooling_mode_max_tokens': False, 'pooling_mode_mean_sqrt_len_tokens': False, 'pooling_mode_weightedmean_tokens': False, 'pooling_mode_lasttoken': False, 'include_prompt': True})
    (2): Normalize()
  ), 'temperature': 0.05}
  ```
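
The guide recorded above is a three-module stack: a BERT encoder, CLS-token pooling over its 768-dimensional embeddings, and L2 normalization. A hedged reconstruction with the sentence-transformers module API (the underlying checkpoint is not named in the card, so the one below is a stand-in):

```python
from sentence_transformers import SentenceTransformer, models

word = models.Transformer("google-bert/bert-base-uncased", max_seq_length=512)  # stand-in checkpoint
pool = models.Pooling(word.get_word_embedding_dimension(), pooling_mode="cls")  # 768-dim CLS pooling
guide = SentenceTransformer(modules=[word, pool, models.Normalize()])
```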

### Training Hyperparameters
#### Non-Default Hyperparameters

- `eval_strategy`: steps
- `per_device_train_batch_size`: 160
- `per_device_eval_batch_size`: 64
- `gradient_accumulation_steps`: 8
- `learning_rate`: 4e-05
- `weight_decay`: 0.0001
- `lr_scheduler_type`: cosine_with_min_lr
- `lr_scheduler_kwargs`: {'num_cycles': 0.5, 'min_lr': 1.3333333333333335e-05}
- `warmup_ratio`: 0.33
- `save_safetensors`: False
- `fp16`: True
- `push_to_hub`: True
- `hub_model_id`: bobox/DeBERTa-small-ST-v1-toytest-checkpoints-tmp
- `hub_strategy`: all_checkpoints
- `batch_sampler`: no_duplicates
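
The non-default values above map directly onto `SentenceTransformerTrainingArguments`. A minimal sketch, with a placeholder output directory:

```python
from sentence_transformers import SentenceTransformerTrainingArguments
from sentence_transformers.training_args import BatchSamplers

args = SentenceTransformerTrainingArguments(
    output_dir="checkpoints",       # placeholder
    eval_strategy="steps",
    per_device_train_batch_size=160,
    per_device_eval_batch_size=64,
    gradient_accumulation_steps=8,  # effective train batch: 160 * 8 = 1280
    learning_rate=4e-5,
    weight_decay=1e-4,
    lr_scheduler_type="cosine_with_min_lr",
    lr_scheduler_kwargs={"num_cycles": 0.5, "min_lr": 4e-5 / 3},  # cosine decay floored at a third of the peak LR
    warmup_ratio=0.33,
    save_safetensors=False,
    fp16=True,
    push_to_hub=True,
    hub_model_id="bobox/DeBERTa-small-ST-v1-toytest-checkpoints-tmp",
    hub_strategy="all_checkpoints",
    batch_sampler=BatchSamplers.NO_DUPLICATES,  # no repeated texts within a batch, protecting in-batch negatives
)
```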

#### All Hyperparameters
<details><summary>Click to expand</summary>

- `overwrite_output_dir`: False
- `do_predict`: False
- `eval_strategy`: steps
- `prediction_loss_only`: True
- `per_device_train_batch_size`: 160
- `per_device_eval_batch_size`: 64
- `per_gpu_train_batch_size`: None
- `per_gpu_eval_batch_size`: None
- `gradient_accumulation_steps`: 8
- `eval_accumulation_steps`: None
- `learning_rate`: 4e-05
- `weight_decay`: 0.0001
- `adam_beta1`: 0.9
- `adam_beta2`: 0.999
- `adam_epsilon`: 1e-08
- `max_grad_norm`: 1.0
- `num_train_epochs`: 3
- `max_steps`: -1
- `lr_scheduler_type`: cosine_with_min_lr
- `lr_scheduler_kwargs`: {'num_cycles': 0.5, 'min_lr': 1.3333333333333335e-05}
- `warmup_ratio`: 0.33
- `warmup_steps`: 0
- `log_level`: passive
- `log_level_replica`: warning
- `log_on_each_node`: True
- `logging_nan_inf_filter`: True
- `save_safetensors`: False
- `save_on_each_node`: False
- `save_only_model`: False
- `restore_callback_states_from_checkpoint`: False
- `no_cuda`: False
- `use_cpu`: False
- `use_mps_device`: False
- `seed`: 42
- `data_seed`: None
- `jit_mode_eval`: False
- `use_ipex`: False
- `bf16`: False
- `fp16`: True
- `fp16_opt_level`: O1
- `half_precision_backend`: auto
- `bf16_full_eval`: False
- `fp16_full_eval`: False
- `tf32`: None
- `local_rank`: 0
- `ddp_backend`: None
- `tpu_num_cores`: None
- `tpu_metrics_debug`: False
- `debug`: []
- `dataloader_drop_last`: False
- `dataloader_num_workers`: 0
- `dataloader_prefetch_factor`: None
- `past_index`: -1
- `disable_tqdm`: False
- `remove_unused_columns`: True
- `label_names`: None
- `load_best_model_at_end`: False
- `ignore_data_skip`: False
- `fsdp`: []
- `fsdp_min_num_params`: 0
- `fsdp_config`: {'min_num_params': 0, 'xla': False, 'xla_fsdp_v2': False, 'xla_fsdp_grad_ckpt': False}
- `fsdp_transformer_layer_cls_to_wrap`: None
- `accelerator_config`: {'split_batches': False, 'dispatch_batches': None, 'even_batches': True, 'use_seedable_sampler': True, 'non_blocking': False, 'gradient_accumulation_kwargs': None}
- `deepspeed`: None
- `label_smoothing_factor`: 0.0
- `optim`: adamw_torch
- `optim_args`: None
- `adafactor`: False
- `group_by_length`: False
- `length_column_name`: length
- `ddp_find_unused_parameters`: None
- `ddp_bucket_cap_mb`: None
- `ddp_broadcast_buffers`: False
- `dataloader_pin_memory`: True
- `dataloader_persistent_workers`: False
- `skip_memory_metrics`: True
- `use_legacy_prediction_loop`: False
- `push_to_hub`: True
- `resume_from_checkpoint`: None
- `hub_model_id`: bobox/DeBERTa-small-ST-v1-toytest-checkpoints-tmp
- `hub_strategy`: all_checkpoints
- `hub_private_repo`: False
- `hub_always_push`: False
- `gradient_checkpointing`: False
- `gradient_checkpointing_kwargs`: None
- `include_inputs_for_metrics`: False
- `eval_do_concat_batches`: True
- `fp16_backend`: auto
- `push_to_hub_model_id`: None
- `push_to_hub_organization`: None
- `mp_parameters`: 
- `auto_find_batch_size`: False
- `full_determinism`: False
- `torchdynamo`: None
- `ray_scope`: last
- `ddp_timeout`: 1800
- `torch_compile`: False
- `torch_compile_backend`: None
- `torch_compile_mode`: None
- `dispatch_batches`: None
- `split_batches`: None
- `include_tokens_per_second`: False
- `include_num_input_tokens_seen`: False
- `neftune_noise_alpha`: None
- `optim_target_modules`: None
- `batch_eval_metrics`: False
- `eval_on_start`: False
- `batch_sampler`: no_duplicates
- `multi_dataset_batch_sampler`: proportional

</details>
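
Each dataset in the logs below gets its own evaluation loss column because sentence-transformers accepts dictionaries of datasets and losses. A hedged sketch of that wiring, with illustrative dataset slices and placeholder checkpoints (an `eval_dataset` dict of the same shape produces the per-dataset loss columns):

```python
from datasets import load_dataset
from sentence_transformers import (
    SentenceTransformer,
    SentenceTransformerTrainer,
    SentenceTransformerTrainingArguments,
)
from sentence_transformers.losses import CachedGISTEmbedLoss
from sentence_transformers.training_args import BatchSamplers, MultiDatasetBatchSamplers

model = SentenceTransformer("microsoft/deberta-v3-small")  # placeholder base model
guide = SentenceTransformer("BAAI/bge-base-en-v1.5")       # hypothetical guide model
loss = CachedGISTEmbedLoss(model, guide=guide, temperature=0.05)

# Two of the training datasets, for illustration; "paws-pos" keeps only
# positive pairs and drops the label, matching the description above.
train_datasets = {
    "gooaq_pairs": load_dataset("sentence-transformers/gooaq", split="train[:10000]"),
    "paws-pos": load_dataset("google-research-datasets/paws", "labeled_final", split="train[:10000]")
        .filter(lambda row: row["label"] == 1)
        .remove_columns(["id", "label"]),
}

args = SentenceTransformerTrainingArguments(
    output_dir="checkpoints",  # plus the non-default arguments listed earlier
    batch_sampler=BatchSamplers.NO_DUPLICATES,
    multi_dataset_batch_sampler=MultiDatasetBatchSamplers.PROPORTIONAL,  # sample each dataset in proportion to its size
)

trainer = SentenceTransformerTrainer(
    model=model,
    args=args,
    train_dataset=train_datasets,
    loss={name: loss for name in train_datasets},  # one loss entry per dataset
)
trainer.train()
```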

### Training Logs
<details><summary>Click to expand</summary>

| Epoch  | Step | Training Loss | vitaminc-pairs loss | trivia pairs loss | xsum-pairs loss | paws-pos loss | sciq pairs loss | msmarco pairs loss | openbookqa pairs loss | gooaq pairs loss | nq pairs loss | scitail-pairs-pos loss | qasc pairs loss | negation-triplets loss | NLI-v2_max_accuracy | VitaminC_max_ap | sts-test_spearman_cosine |
|:------:|:----:|:-------------:|:-------------------:|:-----------------:|:---------------:|:-------------:|:---------------:|:------------------:|:---------------------:|:----------------:|:-------------:|:----------------------:|:---------------:|:----------------------:|:-------------------:|:---------------:|:------------------------:|
| 0.0169 | 3    | 7.2372        | -                   | -                 | -               | -             | -               | -                  | -                     | -                | -             | -                      | -               | -                      | -                   | -               | -                        |
| 0.0339 | 6    | 6.855         | -                   | -                 | -               | -             | -               | -                  | -                     | -                | -             | -                      | -               | -                      | -                   | -               | -                        |
| 0.0508 | 9    | 7.4707        | -                   | -                 | -               | -             | -               | -                  | -                     | -                | -             | -                      | -               | -                      | -                   | -               | -                        |
| 0.0677 | 12   | 7.0187        | -                   | -                 | -               | -             | -               | -                  | -                     | -                | -             | -                      | -               | -                      | -                   | -               | -                        |
| 0.0847 | 15   | 6.6756        | -                   | -                 | -               | -             | -               | -                  | -                     | -                | -             | -                      | -               | -                      | -                   | -               | -                        |
| 0.1016 | 18   | 6.0155        | -                   | -                 | -               | -             | -               | -                  | -                     | -                | -             | -                      | -               | -                      | -                   | -               | -                        |
| 0.1186 | 21   | 6.1644        | -                   | -                 | -               | -             | -               | -                  | -                     | -                | -             | -                      | -               | -                      | -                   | -               | -                        |
| 0.1355 | 24   | 6.2158        | -                   | -                 | -               | -             | -               | -                  | -                     | -                | -             | -                      | -               | -                      | -                   | -               | -                        |
| 0.1524 | 27   | 6.1369        | 2.6986              | 6.3356            | 6.0730          | 2.2308        | 0.3450          | 6.9377             | 4.4060                | 6.4060           | 6.7941        | 1.9217                 | 3.2268          | 5.1429                 | 1.0                 | 0.5356          | 0.1067                   |
| 0.1694 | 30   | 5.7653        | -                   | -                 | -               | -             | -               | -                  | -                     | -                | -             | -                      | -               | -                      | -                   | -               | -                        |
| 0.1863 | 33   | 6.1259        | -                   | -                 | -               | -             | -               | -                  | -                     | -                | -             | -                      | -               | -                      | -                   | -               | -                        |
| 0.2032 | 36   | 5.7539        | -                   | -                 | -               | -             | -               | -                  | -                     | -                | -             | -                      | -               | -                      | -                   | -               | -                        |
| 0.2202 | 39   | 6.0131        | -                   | -                 | -               | -             | -               | -                  | -                     | -                | -             | -                      | -               | -                      | -                   | -               | -                        |
| 0.2371 | 42   | 6.0074        | -                   | -                 | -               | -             | -               | -                  | -                     | -                | -             | -                      | -               | -                      | -                   | -               | -                        |
| 0.2541 | 45   | 5.7125        | -                   | -                 | -               | -             | -               | -                  | -                     | -                | -             | -                      | -               | -                      | -                   | -               | -                        |
| 0.2710 | 48   | 5.5634        | -                   | -                 | -               | -             | -               | -                  | -                     | -                | -             | -                      | -               | -                      | -                   | -               | -                        |
| 0.2879 | 51   | 5.2924        | -                   | -                 | -               | -             | -               | -                  | -                     | -                | -             | -                      | -               | -                      | -                   | -               | -                        |
| 0.3049 | 54   | 5.2286        | 2.6647              | 5.6474            | 5.2498          | 0.8336        | 0.2962          | 5.2464             | 3.8855                | 5.2259           | 5.3326        | 1.2414                 | 2.5309          | 4.6218                 | 1.0                 | 0.5225          | 0.1969                   |
| 0.3218 | 57   | 4.4811        | -                   | -                 | -               | -             | -               | -                  | -                     | -                | -             | -                      | -               | -                      | -                   | -               | -                        |
| 0.3387 | 60   | 4.4239        | -                   | -                 | -               | -             | -               | -                  | -                     | -                | -             | -                      | -               | -                      | -                   | -               | -                        |
| 0.3557 | 63   | 4.0273        | -                   | -                 | -               | -             | -               | -                  | -                     | -                | -             | -                      | -               | -                      | -                   | -               | -                        |
| 0.3726 | 66   | 3.4508        | -                   | -                 | -               | -             | -               | -                  | -                     | -                | -             | -                      | -               | -                      | -                   | -               | -                        |
| 0.3896 | 69   | 3.9702        | -                   | -                 | -               | -             | -               | -                  | -                     | -                | -             | -                      | -               | -                      | -                   | -               | -                        |
| 0.4065 | 72   | 3.5295        | -                   | -                 | -               | -             | -               | -                  | -                     | -                | -             | -                      | -               | -                      | -                   | -               | -                        |
| 0.4234 | 75   | 3.6395        | -                   | -                 | -               | -             | -               | -                  | -                     | -                | -             | -                      | -               | -                      | -                   | -               | -                        |
| 0.4404 | 78   | 3.2398        | -                   | -                 | -               | -             | -               | -                  | -                     | -                | -             | -                      | -               | -                      | -                   | -               | -                        |
| 0.4573 | 81   | 3.116         | 2.5044              | 3.1392            | 2.4290          | 0.1975        | 0.1526          | 2.9677             | 2.4785                | 2.8775           | 3.3587        | 0.2785                 | 1.2902          | 3.4229                 | 1.0                 | 0.5306          | 0.4892                   |
| 0.4742 | 84   | 2.6049        | -                   | -                 | -               | -             | -               | -                  | -                     | -                | -             | -                      | -               | -                      | -                   | -               | -                        |
| 0.4912 | 87   | 2.7738        | -                   | -                 | -               | -             | -               | -                  | -                     | -                | -             | -                      | -               | -                      | -                   | -               | -                        |
| 0.5081 | 90   | 2.5416        | -                   | -                 | -               | -             | -               | -                  | -                     | -                | -             | -                      | -               | -                      | -                   | -               | -                        |
| 0.5251 | 93   | 2.3913        | -                   | -                 | -               | -             | -               | -                  | -                     | -                | -             | -                      | -               | -                      | -                   | -               | -                        |
| 0.5420 | 96   | 2.3144        | -                   | -                 | -               | -             | -               | -                  | -                     | -                | -             | -                      | -               | -                      | -                   | -               | -                        |
| 0.5589 | 99   | 2.1857        | -                   | -                 | -               | -             | -               | -                  | -                     | -                | -             | -                      | -               | -                      | -                   | -               | -                        |
| 0.5759 | 102  | 1.8881        | -                   | -                 | -               | -             | -               | -                  | -                     | -                | -             | -                      | -               | -                      | -                   | -               | -                        |
| 0.5928 | 105  | 2.2699        | -                   | -                 | -               | -             | -               | -                  | -                     | -                | -             | -                      | -               | -                      | -                   | -               | -                        |
| 0.6097 | 108  | 2.1425        | 2.7217              | 1.7080            | 1.2066          | 0.0800        | 0.0949          | 1.6446             | 1.5739                | 1.7924           | 2.3649        | 0.2329                 | 0.8462          | 2.3389                 | 1.0                 | 0.5323          | 0.7806                   |
| 0.6267 | 111  | 2.1276        | -                   | -                 | -               | -             | -               | -                  | -                     | -                | -             | -                      | -               | -                      | -                   | -               | -                        |
| 0.6436 | 114  | 1.7531        | -                   | -                 | -               | -             | -               | -                  | -                     | -                | -             | -                      | -               | -                      | -                   | -               | -                        |
| 0.6606 | 117  | 2.0179        | -                   | -                 | -               | -             | -               | -                  | -                     | -                | -             | -                      | -               | -                      | -                   | -               | -                        |
| 0.6775 | 120  | 1.5305        | -                   | -                 | -               | -             | -               | -                  | -                     | -                | -             | -                      | -               | -                      | -                   | -               | -                        |
| 0.6944 | 123  | 1.6925        | -                   | -                 | -               | -             | -               | -                  | -                     | -                | -             | -                      | -               | -                      | -                   | -               | -                        |
| 0.7114 | 126  | 1.5248        | -                   | -                 | -               | -             | -               | -                  | -                     | -                | -             | -                      | -               | -                      | -                   | -               | -                        |
| 0.7283 | 129  | 1.523         | -                   | -                 | -               | -             | -               | -                  | -                     | -                | -             | -                      | -               | -                      | -                   | -               | -                        |
| 0.7452 | 132  | 1.5474        | -                   | -                 | -               | -             | -               | -                  | -                     | -                | -             | -                      | -               | -                      | -                   | -               | -                        |
| 0.7622 | 135  | 1.7221        | 2.8521              | 1.4495            | 0.7707          | 0.0601        | 0.0751          | 1.1524             | 1.4015                | 1.3955           | 1.7769        | 0.2150                 | 0.6356          | 2.0742                 | 1.0                 | 0.5327          | 0.8315                   |
| 0.7791 | 138  | 1.5366        | -                   | -                 | -               | -             | -               | -                  | -                     | -                | -             | -                      | -               | -                      | -                   | -               | -                        |
| 0.7960 | 141  | 1.3045        | -                   | -                 | -               | -             | -               | -                  | -                     | -                | -             | -                      | -               | -                      | -                   | -               | -                        |
| 0.8130 | 144  | 1.1999        | -                   | -                 | -               | -             | -               | -                  | -                     | -                | -             | -                      | -               | -                      | -                   | -               | -                        |
| 0.8299 | 147  | 1.3483        | -                   | -                 | -               | -             | -               | -                  | -                     | -                | -             | -                      | -               | -                      | -                   | -               | -                        |
| 0.8469 | 150  | 1.2009        | -                   | -                 | -               | -             | -               | -                  | -                     | -                | -             | -                      | -               | -                      | -                   | -               | -                        |
| 0.8638 | 153  | 1.4495        | -                   | -                 | -               | -             | -               | -                  | -                     | -                | -             | -                      | -               | -                      | -                   | -               | -                        |
| 0.8807 | 156  | 1.2329        | -                   | -                 | -               | -             | -               | -                  | -                     | -                | -             | -                      | -               | -                      | -                   | -               | -                        |
| 0.8977 | 159  | 1.1905        | -                   | -                 | -               | -             | -               | -                  | -                     | -                | -             | -                      | -               | -                      | -                   | -               | -                        |
| 0.9146 | 162  | 1.277         | 2.7764              | 1.2929            | 0.5587          | 0.0525        | 0.0604          | 0.8656             | 1.1903                | 1.1581           | 1.1554        | 0.1988                 | 0.4943          | 2.0055                 | 1.0                 | 0.5311          | 0.8548                   |
| 0.9315 | 165  | 1.339         | -                   | -                 | -               | -             | -               | -                  | -                     | -                | -             | -                      | -               | -                      | -                   | -               | -                        |
| 0.9485 | 168  | 1.1535        | -                   | -                 | -               | -             | -               | -                  | -                     | -                | -             | -                      | -               | -                      | -                   | -               | -                        |
| 0.9654 | 171  | 1.1643        | -                   | -                 | -               | -             | -               | -                  | -                     | -                | -             | -                      | -               | -                      | -                   | -               | -                        |
| 0.9824 | 174  | 1.2221        | -                   | -                 | -               | -             | -               | -                  | -                     | -                | -             | -                      | -               | -                      | -                   | -               | -                        |
| 0.9993 | 177  | 1.0974        | -                   | -                 | -               | -             | -               | -                  | -                     | -                | -             | -                      | -               | -                      | -                   | -               | -                        |
| 1.0162 | 180  | 1.0984        | -                   | -                 | -               | -             | -               | -                  | -                     | -                | -             | -                      | -               | -                      | -                   | -               | -                        |
| 1.0332 | 183  | 1.0543        | -                   | -                 | -               | -             | -               | -                  | -                     | -                | -             | -                      | -               | -                      | -                   | -               | -                        |
| 1.0501 | 186  | 1.0994        | -                   | -                 | -               | -             | -               | -                  | -                     | -                | -             | -                      | -               | -                      | -                   | -               | -                        |
| 1.0670 | 189  | 1.0621        | 2.6755              | 1.2004            | 0.3837          | 0.0421        | 0.0556          | 0.6897             | 1.0837                | 1.0353           | 0.9604        | 0.1854                 | 0.4047          | 1.9071                 | 1.0                 | 0.5420          | 0.8680                   |
| 1.0840 | 192  | 0.8724        | -                   | -                 | -               | -             | -               | -                  | -                     | -                | -             | -                      | -               | -                      | -                   | -               | -                        |
| 1.1009 | 195  | 0.9381        | -                   | -                 | -               | -             | -               | -                  | -                     | -                | -             | -                      | -               | -                      | -                   | -               | -                        |
| 1.1179 | 198  | 0.9617        | -                   | -                 | -               | -             | -               | -                  | -                     | -                | -             | -                      | -               | -                      | -                   | -               | -                        |
| 1.1348 | 201  | 1.0139        | -                   | -                 | -               | -             | -               | -                  | -                     | -                | -             | -                      | -               | -                      | -                   | -               | -                        |
| 1.1517 | 204  | 1.1073        | -                   | -                 | -               | -             | -               | -                  | -                     | -                | -             | -                      | -               | -                      | -                   | -               | -                        |
| 1.1687 | 207  | 0.8365        | -                   | -                 | -               | -             | -               | -                  | -                     | -                | -             | -                      | -               | -                      | -                   | -               | -                        |
| 1.1856 | 210  | 1.1012        | -                   | -                 | -               | -             | -               | -                  | -                     | -                | -             | -                      | -               | -                      | -                   | -               | -                        |
| 1.2025 | 213  | 1.0016        | -                   | -                 | -               | -             | -               | -                  | -                     | -                | -             | -                      | -               | -                      | -                   | -               | -                        |
| 1.2195 | 216  | 1.0957        | 2.5466              | 1.1412            | 0.3591          | 0.0395        | 0.0517          | 0.5819             | 0.9366                | 0.9686           | 0.8172        | 0.1901                 | 0.3075          | 1.9161                 | 1.0                 | 0.5385          | 0.8656                   |
| 1.2364 | 219  | 1.1273        | -                   | -                 | -               | -             | -               | -                  | -                     | -                | -             | -                      | -               | -                      | -                   | -               | -                        |
| 1.2534 | 222  | 1.2568        | -                   | -                 | -               | -             | -               | -                  | -                     | -                | -             | -                      | -               | -                      | -                   | -               | -                        |
| 1.2703 | 225  | 0.873         | -                   | -                 | -               | -             | -               | -                  | -                     | -                | -             | -                      | -               | -                      | -                   | -               | -                        |
| 1.2872 | 228  | 1.0003        | -                   | -                 | -               | -             | -               | -                  | -                     | -                | -             | -                      | -               | -                      | -                   | -               | -                        |
| 1.3042 | 231  | 1.142         | -                   | -                 | -               | -             | -               | -                  | -                     | -                | -             | -                      | -               | -                      | -                   | -               | -                        |
| 1.3211 | 234  | 0.807         | -                   | -                 | -               | -             | -               | -                  | -                     | -                | -             | -                      | -               | -                      | -                   | -               | -                        |
| 1.3380 | 237  | 1.0231        | -                   | -                 | -               | -             | -               | -                  | -                     | -                | -             | -                      | -               | -                      | -                   | -               | -                        |
| 1.3550 | 240  | 0.797         | -                   | -                 | -               | -             | -               | -                  | -                     | -                | -             | -                      | -               | -                      | -                   | -               | -                        |
| 1.3719 | 243  | 0.8473        | 2.5140              | 1.1067            | 0.2802          | 0.0343        | 0.0467          | 0.5559             | 0.8562                | 0.8929           | 0.7435        | 0.1750                 | 0.2355          | 1.8629                 | 1.0                 | 0.5508          | 0.8687                   |
| 1.3888 | 246  | 0.9531        | -                   | -                 | -               | -             | -               | -                  | -                     | -                | -             | -                      | -               | -                      | -                   | -               | -                        |
| 1.4058 | 249  | 0.9023        | -                   | -                 | -               | -             | -               | -                  | -                     | -                | -             | -                      | -               | -                      | -                   | -               | -                        |
| 1.4227 | 252  | 0.8922        | -                   | -                 | -               | -             | -               | -                  | -                     | -                | -             | -                      | -               | -                      | -                   | -               | -                        |
| 1.4397 | 255  | 0.9874        | -                   | -                 | -               | -             | -               | -                  | -                     | -                | -             | -                      | -               | -                      | -                   | -               | -                        |
| 1.4566 | 258  | 0.8508        | -                   | -                 | -               | -             | -               | -                  | -                     | -                | -             | -                      | -               | -                      | -                   | -               | -                        |
| 1.4735 | 261  | 0.7149        | -                   | -                 | -               | -             | -               | -                  | -                     | -                | -             | -                      | -               | -                      | -                   | -               | -                        |
| 1.4905 | 264  | 0.894         | -                   | -                 | -               | -             | -               | -                  | -                     | -                | -             | -                      | -               | -                      | -                   | -               | -                        |
| 1.5074 | 267  | 0.867         | -                   | -                 | -               | -             | -               | -                  | -                     | -                | -             | -                      | -               | -                      | -                   | -               | -                        |
| 1.5243 | 270  | 0.7493        | 2.5574              | 1.0634            | 0.2217          | 0.0319        | 0.0435          | 0.5027             | 0.7999                | 0.8005           | 0.6530        | 0.1693                 | 0.2443          | 1.8535                 | 1.0                 | 0.5499          | 0.8716                   |
| 1.5413 | 273  | 0.7974        | -                   | -                 | -               | -             | -               | -                  | -                     | -                | -             | -                      | -               | -                      | -                   | -               | -                        |
| 1.5582 | 276  | 0.797         | -                   | -                 | -               | -             | -               | -                  | -                     | -                | -             | -                      | -               | -                      | -                   | -               | -                        |
| 1.5752 | 279  | 0.6749        | -                   | -                 | -               | -             | -               | -                  | -                     | -                | -             | -                      | -               | -                      | -                   | -               | -                        |
| 1.5921 | 282  | 0.9325        | -                   | -                 | -               | -             | -               | -                  | -                     | -                | -             | -                      | -               | -                      | -                   | -               | -                        |
| 1.6090 | 285  | 0.8418        | -                   | -                 | -               | -             | -               | -                  | -                     | -                | -             | -                      | -               | -                      | -                   | -               | -                        |
| 1.6260 | 288  | 1.0135        | -                   | -                 | -               | -             | -               | -                  | -                     | -                | -             | -                      | -               | -                      | -                   | -               | -                        |
| 1.6429 | 291  | 0.6961        | -                   | -                 | -               | -             | -               | -                  | -                     | -                | -             | -                      | -               | -                      | -                   | -               | -                        |
| 1.6598 | 294  | 0.9361        | -                   | -                 | -               | -             | -               | -                  | -                     | -                | -             | -                      | -               | -                      | -                   | -               | -                        |
| 1.6768 | 297  | 0.6747        | 2.4871              | 0.9762            | 0.2242          | 0.0291        | 0.0396          | 0.5025             | 0.7668                | 0.7546           | 0.6427        | 0.1596                 | 0.1963          | 1.7349                 | 1.0                 | 0.5461          | 0.8787                   |
| 1.6937 | 300  | 0.7786        | -                   | -                 | -               | -             | -               | -                  | -                     | -                | -             | -                      | -               | -                      | -                   | -               | -                        |
| 1.7107 | 303  | 0.7171        | -                   | -                 | -               | -             | -               | -                  | -                     | -                | -             | -                      | -               | -                      | -                   | -               | -                        |
| 1.7276 | 306  | 0.6627        | -                   | -                 | -               | -             | -               | -                  | -                     | -                | -             | -                      | -               | -                      | -                   | -               | -                        |
| 1.7445 | 309  | 0.6711        | -                   | -                 | -               | -             | -               | -                  | -                     | -                | -             | -                      | -               | -                      | -                   | -               | -                        |
| 1.7615 | 312  | 0.9076        | -                   | -                 | -               | -             | -               | -                  | -                     | -                | -             | -                      | -               | -                      | -                   | -               | -                        |
| 1.7784 | 315  | 0.7414        | -                   | -                 | -               | -             | -               | -                  | -                     | -                | -             | -                      | -               | -                      | -                   | -               | -                        |
| 1.7953 | 318  | 0.582         | -                   | -                 | -               | -             | -               | -                  | -                     | -                | -             | -                      | -               | -                      | -                   | -               | -                        |
| 1.8123 | 321  | 0.6068        | -                   | -                 | -               | -             | -               | -                  | -                     | -                | -             | -                      | -               | -                      | -                   | -               | -                        |
| 1.8292 | 324  | 0.6219        | 2.5197              | 1.0206            | 0.1630          | 0.0273        | 0.0383          | 0.4859             | 0.7109                | 0.7736           | 0.5533        | 0.1535                 | 0.2044          | 1.7016                 | 1.0                 | 0.5532          | 0.8807                   |
| 1.8462 | 327  | 0.5862        | -                   | -                 | -               | -             | -               | -                  | -                     | -                | -             | -                      | -               | -                      | -                   | -               | -                        |
| 1.8631 | 330  | 0.678         | -                   | -                 | -               | -             | -               | -                  | -                     | -                | -             | -                      | -               | -                      | -                   | -               | -                        |
| 1.8800 | 333  | 0.6272        | -                   | -                 | -               | -             | -               | -                  | -                     | -                | -             | -                      | -               | -                      | -                   | -               | -                        |
| 1.8970 | 336  | 0.5048        | -                   | -                 | -               | -             | -               | -                  | -                     | -                | -             | -                      | -               | -                      | -                   | -               | -                        |
| 1.9139 | 339  | 0.7653        | -                   | -                 | -               | -             | -               | -                  | -                     | -                | -             | -                      | -               | -                      | -                   | -               | -                        |
| 1.9308 | 342  | 0.6613        | -                   | -                 | -               | -             | -               | -                  | -                     | -                | -             | -                      | -               | -                      | -                   | -               | -                        |
| 1.9478 | 345  | 0.6122        | -                   | -                 | -               | -             | -               | -                  | -                     | -                | -             | -                      | -               | -                      | -                   | -               | -                        |
| 1.9647 | 348  | 0.5939        | -                   | -                 | -               | -             | -               | -                  | -                     | -                | -             | -                      | -               | -                      | -                   | -               | -                        |
| 1.9817 | 351  | 0.6923        | 2.4379              | 0.9582            | 0.1464          | 0.0264        | 0.0382          | 0.4348             | 0.7554                | 0.7220           | 0.5432        | 0.1481                 | 0.1640          | 1.7345                 | 1.0                 | 0.5560          | 0.8837                   |
| 1.9986 | 354  | 0.5712        | -                   | -                 | -               | -             | -               | -                  | -                     | -                | -             | -                      | -               | -                      | -                   | -               | -                        |
| 2.0155 | 357  | 0.5969        | -                   | -                 | -               | -             | -               | -                  | -                     | -                | -             | -                      | -               | -                      | -                   | -               | -                        |
| 2.0325 | 360  | 0.5881        | -                   | -                 | -               | -             | -               | -                  | -                     | -                | -             | -                      | -               | -                      | -                   | -               | -                        |
| 2.0494 | 363  | 0.6005        | -                   | -                 | -               | -             | -               | -                  | -                     | -                | -             | -                      | -               | -                      | -                   | -               | -                        |
| 2.0663 | 366  | 0.6066        | -                   | -                 | -               | -             | -               | -                  | -                     | -                | -             | -                      | -               | -                      | -                   | -               | -                        |
| 2.0833 | 369  | 0.4921        | -                   | -                 | -               | -             | -               | -                  | -                     | -                | -             | -                      | -               | -                      | -                   | -               | -                        |
| 2.1002 | 372  | 0.5354        | -                   | -                 | -               | -             | -               | -                  | -                     | -                | -             | -                      | -               | -                      | -                   | -               | -                        |
| 2.1171 | 375  | 0.5602        | -                   | -                 | -               | -             | -               | -                  | -                     | -                | -             | -                      | -               | -                      | -                   | -               | -                        |
| 2.1341 | 378  | 0.5686        | 2.3908              | 0.9614            | 0.1454          | 0.0271        | 0.0374          | 0.4246             | 0.7796                | 0.6965           | 0.5298        | 0.1401                 | 0.1604          | 1.7678                 | 1.0                 | 0.5539          | 0.8804                   |
| 2.1510 | 381  | 0.6496        | -                   | -                 | -               | -             | -               | -                  | -                     | -                | -             | -                      | -               | -                      | -                   | -               | -                        |
| 2.1680 | 384  | 0.4713        | -                   | -                 | -               | -             | -               | -                  | -                     | -                | -             | -                      | -               | -                      | -                   | -               | -                        |
| 2.1849 | 387  | 0.6345        | -                   | -                 | -               | -             | -               | -                  | -                     | -                | -             | -                      | -               | -                      | -                   | -               | -                        |
| 2.2018 | 390  | 0.5994        | -                   | -                 | -               | -             | -               | -                  | -                     | -                | -             | -                      | -               | -                      | -                   | -               | -                        |
| 2.2188 | 393  | 0.6763        | -                   | -                 | -               | -             | -               | -                  | -                     | -                | -             | -                      | -               | -                      | -                   | -               | -                        |
| 2.2357 | 396  | 0.7254        | -                   | -                 | -               | -             | -               | -                  | -                     | -                | -             | -                      | -               | -                      | -                   | -               | -                        |
| 2.2526 | 399  | 0.8032        | -                   | -                 | -               | -             | -               | -                  | -                     | -                | -             | -                      | -               | -                      | -                   | -               | -                        |
| 2.2696 | 402  | 0.4914        | -                   | -                 | -               | -             | -               | -                  | -                     | -                | -             | -                      | -               | -                      | -                   | -               | -                        |
| 2.2865 | 405  | 0.6307        | 2.4388              | 0.9862            | 0.1308          | 0.0262        | 0.0379          | 0.3928             | 0.7434                | 0.6976           | 0.4998        | 0.1192                 | 0.1466          | 1.7093                 | 1.0                 | 0.5533          | 0.8859                   |
| 2.3035 | 408  | 0.7493        | -                   | -                 | -               | -             | -               | -                  | -                     | -                | -             | -                      | -               | -                      | -                   | -               | -                        |
| 2.3204 | 411  | 0.5139        | -                   | -                 | -               | -             | -               | -                  | -                     | -                | -             | -                      | -               | -                      | -                   | -               | -                        |
| 2.3373 | 414  | 0.6364        | -                   | -                 | -               | -             | -               | -                  | -                     | -                | -             | -                      | -               | -                      | -                   | -               | -                        |
| 2.3543 | 417  | 0.4763        | -                   | -                 | -               | -             | -               | -                  | -                     | -                | -             | -                      | -               | -                      | -                   | -               | -                        |
| 2.3712 | 420  | 0.583         | -                   | -                 | -               | -             | -               | -                  | -                     | -                | -             | -                      | -               | -                      | -                   | -               | -                        |
| 2.3881 | 423  | 0.5912        | -                   | -                 | -               | -             | -               | -                  | -                     | -                | -             | -                      | -               | -                      | -                   | -               | -                        |
| 2.4051 | 426  | 0.5936        | -                   | -                 | -               | -             | -               | -                  | -                     | -                | -             | -                      | -               | -                      | -                   | -               | -                        |
| 2.4220 | 429  | 0.5959        | -                   | -                 | -               | -             | -               | -                  | -                     | -                | -             | -                      | -               | -                      | -                   | -               | -                        |
| 2.4390 | 432  | 0.6760        | 2.4265              | 0.9634            | 0.1220          | 0.0260        | 0.0362          | 0.4292             | 0.7433                | 0.6771           | 0.4752        | 0.1282                 | 0.1304          | 1.6943                 | 1.0                 | 0.5532          | 0.8878                   |
| 2.4559 | 435  | 0.5622        | -                   | -                 | -               | -             | -               | -                  | -                     | -                | -             | -                      | -               | -                      | -                   | -               | -                        |
| 2.4728 | 438  | 0.4633        | -                   | -                 | -               | -             | -               | -                  | -                     | -                | -             | -                      | -               | -                      | -                   | -               | -                        |
| 2.4898 | 441  | 0.5955        | -                   | -                 | -               | -             | -               | -                  | -                     | -                | -             | -                      | -               | -                      | -                   | -               | -                        |
| 2.5067 | 444  | 0.6271        | -                   | -                 | -               | -             | -               | -                  | -                     | -                | -             | -                      | -               | -                      | -                   | -               | -                        |
| 2.5236 | 447  | 0.4988        | -                   | -                 | -               | -             | -               | -                  | -                     | -                | -             | -                      | -               | -                      | -                   | -               | -                        |
| 2.5406 | 450  | 0.5190        | -                   | -                 | -               | -             | -               | -                  | -                     | -                | -             | -                      | -               | -                      | -                   | -               | -                        |
| 2.5575 | 453  | 0.5538        | -                   | -                 | -               | -             | -               | -                  | -                     | -                | -             | -                      | -               | -                      | -                   | -               | -                        |
| 2.5745 | 456  | 0.4826        | -                   | -                 | -               | -             | -               | -                  | -                     | -                | -             | -                      | -               | -                      | -                   | -               | -                        |
| 2.5914 | 459  | 0.6322        | 2.4541              | 0.9231            | 0.1224          | 0.0253        | 0.0345          | 0.4048             | 0.7595                | 0.6607           | 0.4713        | 0.1168                 | 0.1323          | 1.7024                 | 1.0                 | 0.5557          | 0.8868                   |
| 2.6083 | 462  | 0.6342        | -                   | -                 | -               | -             | -               | -                  | -                     | -                | -             | -                      | -               | -                      | -                   | -               | -                        |
| 2.6253 | 465  | 0.7012        | -                   | -                 | -               | -             | -               | -                  | -                     | -                | -             | -                      | -               | -                      | -                   | -               | -                        |
| 2.6422 | 468  | 0.4175        | -                   | -                 | -               | -             | -               | -                  | -                     | -                | -             | -                      | -               | -                      | -                   | -               | -                        |
| 2.6591 | 471  | 0.7575        | -                   | -                 | -               | -             | -               | -                  | -                     | -                | -             | -                      | -               | -                      | -                   | -               | -                        |
| 2.6761 | 474  | 0.4687        | -                   | -                 | -               | -             | -               | -                  | -                     | -                | -             | -                      | -               | -                      | -                   | -               | -                        |
| 2.6930 | 477  | 0.5907        | -                   | -                 | -               | -             | -               | -                  | -                     | -                | -             | -                      | -               | -                      | -                   | -               | -                        |
| 2.7100 | 480  | 0.4796        | -                   | -                 | -               | -             | -               | -                  | -                     | -                | -             | -                      | -               | -                      | -                   | -               | -                        |
| 2.7269 | 483  | 0.4809        | -                   | -                 | -               | -             | -               | -                  | -                     | -                | -             | -                      | -               | -                      | -                   | -               | -                        |
| 2.7438 | 486  | 0.4696        | 2.4899              | 0.9546            | 0.1169          | 0.0247        | 0.0343          | 0.4138             | 0.7444                | 0.6688           | 0.4838        | 0.1166                 | 0.1279          | 1.6605                 | 1.0                 | 0.5527          | 0.8883                   |
| 2.7608 | 489  | 0.6588        | -                   | -                 | -               | -             | -               | -                  | -                     | -                | -             | -                      | -               | -                      | -                   | -               | -                        |
| 2.7777 | 492  | 0.5675        | -                   | -                 | -               | -             | -               | -                  | -                     | -                | -             | -                      | -               | -                      | -                   | -               | -                        |
| 2.7946 | 495  | 0.4007        | -                   | -                 | -               | -             | -               | -                  | -                     | -                | -             | -                      | -               | -                      | -                   | -               | -                        |
| 2.8116 | 498  | 0.4476        | -                   | -                 | -               | -             | -               | -                  | -                     | -                | -             | -                      | -               | -                      | -                   | -               | -                        |
| 2.8285 | 501  | 0.4330        | -                   | -                 | -               | -             | -               | -                  | -                     | -                | -             | -                      | -               | -                      | -                   | -               | -                        |
| 2.8454 | 504  | 0.4154        | -                   | -                 | -               | -             | -               | -                  | -                     | -                | -             | -                      | -               | -                      | -                   | -               | -                        |
| 2.8624 | 507  | 0.5416        | -                   | -                 | -               | -             | -               | -                  | -                     | -                | -             | -                      | -               | -                      | -                   | -               | -                        |
| 2.8793 | 510  | 0.4546        | -                   | -                 | -               | -             | -               | -                  | -                     | -                | -             | -                      | -               | -                      | -                   | -               | -                        |
| 2.8963 | 513  | 0.3326        | 2.4924              | 0.9493            | 0.1071          | 0.0248        | 0.0344          | 0.4033             | 0.7376                | 0.6558           | 0.4478        | 0.1148                 | 0.1219          | 1.6918                 | 1.0                 | 0.5534          | 0.8907                   |
| 2.9132 | 516  | 0.5940        | -                   | -                 | -               | -             | -               | -                  | -                     | -                | -             | -                      | -               | -                      | -                   | -               | -                        |
| 2.9301 | 519  | 0.4727        | -                   | -                 | -               | -             | -               | -                  | -                     | -                | -             | -                      | -               | -                      | -                   | -               | -                        |
| 2.9471 | 522  | 0.4701        | -                   | -                 | -               | -             | -               | -                  | -                     | -                | -             | -                      | -               | -                      | -                   | -               | -                        |
| 2.9640 | 525  | 0.4606        | -                   | -                 | -               | -             | -               | -                  | -                     | -                | -             | -                      | -               | -                      | -                   | -               | -                        |
| 2.9809 | 528  | 0.5025        | -                   | -                 | -               | -             | -               | -                  | -                     | -                | -             | -                      | -               | -                      | -                   | -               | -                        |
| 2.9979 | 531  | 0.4314        | 2.4532              | 0.9270            | 0.1131          | 0.0247        | 0.0344          | 0.3951             | 0.7123                | 0.6345           | 0.4383        | 0.1143                 | 0.1159          | 1.7003                 | 1.0                 | 0.5539          | 0.8904                   |

</details>
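
The interleaved rows above reflect the logging cadence during training: the training loss was recorded every 3 steps, and the full evaluation suite ran every 27 steps (plus a final evaluation at the end of training). Below is a minimal sketch of `SentenceTransformerTrainingArguments` that would produce this cadence; the intervals are inferred from the table and `output_dir` is a hypothetical path, so the actual training configuration may have differed:

```python
from sentence_transformers import SentenceTransformerTrainingArguments

# Sketch only: intervals inferred from the log spacing above, not the
# verified training configuration for this model.
args = SentenceTransformerTrainingArguments(
    output_dir="output",    # hypothetical output path
    num_train_epochs=3,     # the log ends just before epoch 3.0
    logging_steps=3,        # loss rows appear every 3 steps
    eval_strategy="steps",  # evaluate on a step schedule
    eval_steps=27,          # metric rows appear every 27 steps
)
```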

### Framework Versions
- Python: 3.10.13
- Sentence Transformers: 3.0.1
- Transformers: 4.42.3
- PyTorch: 2.1.2
- Accelerate: 0.32.1
- Datasets: 2.20.0
- Tokenizers: 0.19.1
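
To check that a local environment matches these versions before reproducing results, a quick sanity check (assuming the packages above are installed under their usual import names):

```python
# Print the installed versions of the packages listed above so they can
# be compared against the versions this model was trained with.
import sys

import accelerate
import datasets
import sentence_transformers
import tokenizers
import torch
import transformers

print("Python:", sys.version.split()[0])
for name, module in [
    ("Sentence Transformers", sentence_transformers),
    ("Transformers", transformers),
    ("PyTorch", torch),
    ("Accelerate", accelerate),
    ("Datasets", datasets),
    ("Tokenizers", tokenizers),
]:
    print(f"{name}: {module.__version__}")
```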

## Citation

### BibTeX

#### Sentence Transformers
```bibtex
@inproceedings{reimers-2019-sentence-bert,
    title = "Sentence-BERT: Sentence Embeddings using Siamese BERT-Networks",
    author = "Reimers, Nils and Gurevych, Iryna",
    booktitle = "Proceedings of the 2019 Conference on Empirical Methods in Natural Language Processing",
    month = "11",
    year = "2019",
    publisher = "Association for Computational Linguistics",
    url = "https://arxiv.org/abs/1908.10084",
}
```
