eval/beir-arguana_ndcg@10 = 0.37881
eval/beir-arguana_recall@100 = 0.9175
eval/beir-avg_ndcg@10 = 0.36036
eval/beir-avg_recall@10 = 0.42374800000000007
eval/beir-avg_recall@100 = 0.651238
eval/beir-avg_recall@20 = 0.49372999999999995
eval/beir-climate-fever_ndcg@10 = 0.17148
eval/beir-climate-fever_recall@10 = 0.21584
eval/beir-climate-fever_recall@100 = 0.47004
eval/beir-climate-fever_recall@20 = 0.28686
eval/beir-cqadupstack_ndcg@10 = 0.2803316666666667
eval/beir-cqadupstack_recall@100 = 0.604765
eval/beir-dbpedia-entity_ndcg@10 = 0.30952
eval/beir-dbpedia-entity_recall@10 = 0.21
eval/beir-dbpedia-entity_recall@100 = 0.47187
eval/beir-dbpedia-entity_recall@20 = 0.2695
eval/beir-fever_ndcg@10 = 0.59083
eval/beir-fever_recall@10 = 0.77263
eval/beir-fever_recall@100 = 0.90212
eval/beir-fever_recall@20 = 0.82815
eval/beir-fiqa_ndcg@10 = 0.25271
eval/beir-fiqa_recall@100 = 0.57736
eval/beir-hotpotqa_ndcg@10 = 0.51242
eval/beir-hotpotqa_recall@10 = 0.54774
eval/beir-hotpotqa_recall@100 = 0.71269
eval/beir-hotpotqa_recall@20 = 0.60459
eval/beir-msmarco_ndcg@10 = 0.21755
eval/beir-msmarco_recall@10 = 0.37253
eval/beir-msmarco_recall@100 = 0.69947
eval/beir-msmarco_recall@20 = 0.47955
eval/beir-nfcorpus_ndcg@10 = 0.29721
eval/beir-nfcorpus_recall@100 = 0.27314
eval/beir-nq_ndcg@10 = 0.28932
eval/beir-nq_recall@100 = 0.80159
eval/beir-quora_ndcg@10 = 0.81279
eval/beir-quora_recall@100 = 0.98333
eval/beir-scidocs_ndcg@10 = 0.1582
eval/beir-scidocs_recall@100 = 0.37162
eval/beir-scifact_ndcg@10 = 0.63462
eval/beir-scifact_recall@100 = 0.90322
eval/beir-trec-covid_ndcg@10 = 0.5527
eval/beir-trec-covid_recall@100 = 0.10295
eval/beir-webis-touche2020_ndcg@10 = 0.18394
eval/beir-webis-touche2020_recall@100 = 0.44207
eval/qa-curatedtrec-test-acc@100 = 0.9351585014409222
eval/qa-curatedtrec-test-acc@20 = 0.8515850144092219
eval/qa-curatedtrec-test-acc@5 = 0.7046109510086456
eval/qa-entityqs-macro-acc@100 = 0.7602876877587357
eval/qa-entityqs-macro-acc@20 = 0.6447794141198262
eval/qa-entityqs-macro-acc@5 = 0.5100471129421901
eval/qa-nq-test-acc@100 = 0.8013850415512466
eval/qa-nq-test-acc@20 = 0.6750692520775623
eval/qa-nq-test-acc@5 = 0.4692520775623269
eval/qa-squad1-test-acc@100 = 0.7788079470198676
eval/qa-squad1-test-acc@20 = 0.6403973509933775
eval/qa-squad1-test-acc@5 = 0.4489120151371807
eval/qa-trivia-test-acc@100 = 0.8371784672500663
eval/qa-trivia-test-acc@20 = 0.7591266684345443
eval/qa-trivia-test-acc@5 = 0.6233536639264563
eval/qa-webq-test-acc@100 = 0.8188976377952756
eval/qa-webq-test-acc@20 = 0.7052165354330708
eval/qa-webq-test-acc@5 = 0.5068897637795275
eval/senteval-CR = 87.01
eval/senteval-MPQA = 88.89
eval/senteval-MR = 81.09
eval/senteval-MRPC = 70.8
eval/senteval-SICKRelatedness = 0.7025121676824595
eval/senteval-SST2 = 84.06
eval/senteval-STS12 = 0.6198695741812278
eval/senteval-STS13 = 0.7458274512781141
eval/senteval-STS14 = 0.6864467990669687
eval/senteval-STS15 = 0.7986117512115241
eval/senteval-STS16 = 0.7823935956761426
eval/senteval-STSBenchmark = 0.7668425138024548
eval/senteval-SUBJ = 95.34
eval/senteval-TREC = 82.7
eval/senteval-avg_sts_7 = 0.7289291218426988
eval/senteval-avg_transfer = 84.27
train/global_step = 100000
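
# A minimal sketch (not part of the original log) for loading the
# "key = value" metric lines above into a nested dict grouped by
# namespace (beir / qa / senteval / train). The filename
# "eval_results.txt" is an assumed example, not taken from the log.
from collections import defaultdict

def load_metrics(path="eval_results.txt"):
    metrics = defaultdict(dict)
    with open(path) as f:
        for line in f:
            line = line.strip()
            if " = " not in line:
                continue
            key, value = line.split(" = ", 1)
            # Keys look like "eval/beir-arguana_ndcg@10" or "train/global_step".
            prefix, _, name = key.partition("/")
            group = name.split("-", 1)[0] if prefix == "eval" else prefix
            metrics[group][key] = float(value)
    return dict(metrics)

if __name__ == "__main__":
    m = load_metrics()
    print(sorted(m))                          # e.g. ['beir', 'qa', 'senteval', 'train']
    print(m["beir"]["eval/beir-avg_ndcg@10"]) # 0.36036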