eval/beir-arguana_ndcg@10 = 0.38653
eval/beir-arguana_recall@100 = 0.94097
eval/beir-avg_ndcg@10 = 0.3795
eval/beir-avg_recall@10 = 0.436824
eval/beir-avg_recall@100 = 0.6509560000000001
eval/beir-avg_recall@20 = 0.501232
eval/beir-climate-fever_ndcg@10 = 0.17804
eval/beir-climate-fever_recall@10 = 0.21169
eval/beir-climate-fever_recall@100 = 0.47264
eval/beir-climate-fever_recall@20 = 0.27416
eval/beir-cqadupstack_ndcg@10 = 0.2928333333333334
eval/beir-cqadupstack_recall@100 = 0.6243891666666667
eval/beir-dbpedia-entity_ndcg@10 = 0.31098
eval/beir-dbpedia-entity_recall@10 = 0.20823
eval/beir-dbpedia-entity_recall@100 = 0.45154
eval/beir-dbpedia-entity_recall@20 = 0.26649
eval/beir-fever_ndcg@10 = 0.63587
eval/beir-fever_recall@10 = 0.80061
eval/beir-fever_recall@100 = 0.91485
eval/beir-fever_recall@20 = 0.85087
eval/beir-fiqa_ndcg@10 = 0.26763
eval/beir-fiqa_recall@100 = 0.6111
eval/beir-hotpotqa_ndcg@10 = 0.5397
eval/beir-hotpotqa_recall@10 = 0.56394
eval/beir-hotpotqa_recall@100 = 0.70702
eval/beir-hotpotqa_recall@20 = 0.6106
eval/beir-msmarco_ndcg@10 = 0.23291
eval/beir-msmarco_recall@10 = 0.39965
eval/beir-msmarco_recall@100 = 0.70873
eval/beir-msmarco_recall@20 = 0.50404
eval/beir-nfcorpus_ndcg@10 = 0.29881
eval/beir-nfcorpus_recall@100 = 0.28269
eval/beir-nq_ndcg@10 = 0.29837
eval/beir-nq_recall@100 = 0.81825
eval/beir-quora_ndcg@10 = 0.78119
eval/beir-quora_recall@100 = 0.97876
eval/beir-scidocs_ndcg@10 = 0.16627
eval/beir-scidocs_recall@100 = 0.38372
eval/beir-scifact_ndcg@10 = 0.65554
eval/beir-scifact_recall@100 = 0.91489
eval/beir-trec-covid_ndcg@10 = 0.52738
eval/beir-trec-covid_recall@100 = 0.10192
eval/beir-webis-touche2020_ndcg@10 = 0.20204
eval/beir-webis-touche2020_recall@100 = 0.429
eval/qa-curatedtrec-test-acc@100 = 0.9279538904899135
eval/qa-curatedtrec-test-acc@20 = 0.8530259365994236
eval/qa-curatedtrec-test-acc@5 = 0.6829971181556196
eval/qa-entityqs-macro-acc@100 = 0.7783879340126217
eval/qa-entityqs-macro-acc@20 = 0.6737523540999139
eval/qa-entityqs-macro-acc@5 = 0.5504056340521896
eval/qa-nq-test-acc@100 = 0.8096952908587257
eval/qa-nq-test-acc@20 = 0.6709141274238227
eval/qa-nq-test-acc@5 = 0.48337950138504154
eval/qa-squad1-test-acc@100 = 0.7699148533585619
eval/qa-squad1-test-acc@20 = 0.6233680227057711
eval/qa-squad1-test-acc@5 = 0.4357615894039735
eval/qa-trivia-test-acc@100 = 0.8389463449129321
eval/qa-trivia-test-acc@20 = 0.7557677008750995
eval/qa-trivia-test-acc@5 = 0.6224697250950234
eval/qa-webq-test-acc@100 = 0.8139763779527559
eval/qa-webq-test-acc@20 = 0.6973425196850394
eval/qa-webq-test-acc@5 = 0.4867125984251969
eval/senteval-CR = 87.87
eval/senteval-MPQA = 89.07
eval/senteval-MR = 81.81
eval/senteval-MRPC = 70.39
eval/senteval-SICKRelatedness = 0.6992260214830822
eval/senteval-SST2 = 84.4
eval/senteval-STS12 = 0.5812709943392725
eval/senteval-STS13 = 0.7340343966035406
eval/senteval-STS14 = 0.6371047443411619
eval/senteval-STS15 = 0.7672378740860561
eval/senteval-STS16 = 0.7833717936510003
eval/senteval-STSBenchmark = 0.7295849518142924
eval/senteval-SUBJ = 95.31
eval/senteval-TREC = 80.43
eval/senteval-avg_sts_7 = 0.7045472537597723
eval/senteval-avg_transfer = 84.18285714285716
train/global_step = 100000
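
A minimal parsing sketch in Python, assuming the metrics above are stored one "key = value" pair per line in a plain-text file (the file name eval_results.txt is hypothetical). It recomputes the two reported SentEval aggregates, avg_sts_7 (mean of the seven STS scores) and avg_transfer (mean of the seven transfer-task accuracies), from the individual entries:

# Sketch only, not part of the original log: parse "key = value" lines and
# recompute the SentEval averages reported above. File name is an assumption.
metrics = {}
with open("eval_results.txt") as f:
    for line in f:
        if " = " in line:
            key, value = line.strip().split(" = ")
            metrics[key] = float(value)

sts_keys = ["STS12", "STS13", "STS14", "STS15", "STS16",
            "STSBenchmark", "SICKRelatedness"]
transfer_keys = ["MR", "CR", "SUBJ", "MPQA", "SST2", "TREC", "MRPC"]

avg_sts_7 = sum(metrics[f"eval/senteval-{k}"] for k in sts_keys) / len(sts_keys)
avg_transfer = sum(metrics[f"eval/senteval-{k}"] for k in transfer_keys) / len(transfer_keys)

print(avg_sts_7)      # ~0.70455, matches eval/senteval-avg_sts_7
print(avg_transfer)   # ~84.1829, matches eval/senteval-avg_transfer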