YanshekWoo committed
Commit 45e42c8 • 1 Parent(s): d06313e

init
README.md
CHANGED
@@ -12069,6 +12069,63 @@ model-index:
       value: 89.12
     task:
       type: Classification
+  - dataset:
+      config: default
+      name: MTEB AlloProfClusteringP2P
+      revision: 392ba3f5bcc8c51f578786c1fc3dae648662cb9b
+      split: test
+      type: lyon-nlp/alloprof
+    metrics:
+    - type: main_score
+      value: 66.7100274116735
+    - type: v_measure
+      value: 66.7100274116735
+    - type: v_measure_std
+      value: 2.065600197695283
+    task:
+      type: Clustering
+  - dataset:
+      config: default
+      name: MTEB AlloProfClusteringS2S
+      revision: 392ba3f5bcc8c51f578786c1fc3dae648662cb9b
+      split: test
+      type: lyon-nlp/alloprof
+    metrics:
+    - type: main_score
+      value: 47.67572024379311
+    - type: v_measure
+      value: 47.67572024379311
+    - type: v_measure_std
+      value: 3.1905282169494953
+    task:
+      type: Clustering
+  - dataset:
+      config: default
+      name: MTEB AlloprofReranking
+      revision: 65393d0d7a08a10b4e348135e824f385d420b0fd
+      split: test
+      type: lyon-nlp/mteb-fr-reranking-alloprof-s2p
+    metrics:
+    - type: main_score
+      value: 75.04647907753767
+    - type: map
+      value: 75.04647907753767
+    - type: mrr
+      value: 76.25801875154207
+    - type: nAUC_map_diff1
+      value: 56.38279442235466
+    - type: nAUC_map_max
+      value: 20.009630947768642
+    - type: nAUC_map_std
+      value: 21.626818227466185
+    - type: nAUC_mrr_diff1
+      value: 56.33463291672874
+    - type: nAUC_mrr_max
+      value: 20.472794140230853
+    - type: nAUC_mrr_std
+      value: 21.491759650866392
+    task:
+      type: Reranking
   - dataset:
       config: default
       name: MTEB AlloprofRetrieval
@@ -12360,6 +12417,23 @@ model-index:
       value: 60.363
     task:
       type: Retrieval
+  - dataset:
+      config: fr
+      name: MTEB AmazonReviewsClassification (fr)
+      revision: 1399c76144fd37290681b995c656ef9b2e06e26d
+      split: test
+      type: mteb/amazon_reviews_multi
+    metrics:
+    - type: accuracy
+      value: 52.622
+    - type: f1
+      value: 48.89589865194384
+    - type: f1_weighted
+      value: 48.89589865194384
+    - type: main_score
+      value: 52.622
+    task:
+      type: Classification
   - dataset:
       config: default
       name: MTEB BSARDRetrieval
@@ -12651,6 +12725,166 @@ model-index:
       value: 25.676
     task:
       type: Retrieval
+  - dataset:
+      config: default
+      name: MTEB HALClusteringS2S
+      revision: e06ebbbb123f8144bef1a5d18796f3dec9ae2915
+      split: test
+      type: lyon-nlp/clustering-hal-s2s
+    metrics:
+    - type: main_score
+      value: 26.958035381361377
+    - type: v_measure
+      value: 26.958035381361377
+    - type: v_measure_std
+      value: 2.401353383071989
+    task:
+      type: Clustering
+  - dataset:
+      config: fr
+      name: MTEB MLSUMClusteringP2P (fr)
+      revision: b5d54f8f3b61ae17845046286940f03c6bc79bc7
+      split: test
+      type: reciTAL/mlsum
+    metrics:
+    - type: main_score
+      value: 46.15554988136895
+    - type: v_measure
+      value: 46.15554988136895
+    - type: v_measure_std
+      value: 2.459531525134688
+    task:
+      type: Clustering
+  - dataset:
+      config: fr
+      name: MTEB MLSUMClusteringS2S (fr)
+      revision: b5d54f8f3b61ae17845046286940f03c6bc79bc7
+      split: test
+      type: reciTAL/mlsum
+    metrics:
+    - type: main_score
+      value: 45.73187202144909
+    - type: v_measure
+      value: 45.73187202144909
+    - type: v_measure_std
+      value: 1.6402520163270633
+    task:
+      type: Clustering
+  - dataset:
+      config: fr
+      name: MTEB MTOPDomainClassification (fr)
+      revision: d80d48c1eb48d3562165c59d59d0034df9fff0bf
+      split: test
+      type: mteb/mtop_domain
+    metrics:
+    - type: accuracy
+      value: 95.78766050735986
+    - type: f1
+      value: 95.61497706645892
+    - type: f1_weighted
+      value: 95.79887587161483
+    - type: main_score
+      value: 95.78766050735986
+    task:
+      type: Classification
+  - dataset:
+      config: fr
+      name: MTEB MTOPIntentClassification (fr)
+      revision: ae001d0e6b1228650b7bd1c2c65fb50ad11a8aba
+      split: test
+      type: mteb/mtop_intent
+    metrics:
+    - type: accuracy
+      value: 80.8800501096148
+    - type: f1
+      value: 53.9945274705194
+    - type: f1_weighted
+      value: 80.94438738414857
+    - type: main_score
+      value: 80.8800501096148
+    task:
+      type: Classification
+  - dataset:
+      config: fra
+      name: MTEB MasakhaNEWSClassification (fra)
+      revision: 18193f187b92da67168c655c9973a165ed9593dd
+      split: test
+      type: mteb/masakhanews
+    metrics:
+    - type: accuracy
+      value: 83.6255924170616
+    - type: f1
+      value: 79.70294641135138
+    - type: f1_weighted
+      value: 83.33457992982105
+    - type: main_score
+      value: 83.6255924170616
+    task:
+      type: Classification
+  - dataset:
+      config: fra
+      name: MTEB MasakhaNEWSClusteringP2P (fra)
+      revision: 8ccc72e69e65f40c70e117d8b3c08306bb788b60
+      split: test
+      type: masakhane/masakhanews
+    metrics:
+    - type: main_score
+      value: 77.1970570860131
+    - type: v_measure
+      value: 77.1970570860131
+    - type: v_measure_std
+      value: 22.0055550035463
+    task:
+      type: Clustering
+  - dataset:
+      config: fra
+      name: MTEB MasakhaNEWSClusteringS2S (fra)
+      revision: 8ccc72e69e65f40c70e117d8b3c08306bb788b60
+      split: test
+      type: masakhane/masakhanews
+    metrics:
+    - type: main_score
+      value: 65.92601417312947
+    - type: v_measure
+      value: 65.92601417312947
+    - type: v_measure_std
+      value: 30.421071440935687
+    task:
+      type: Clustering
+  - dataset:
+      config: fr
+      name: MTEB MassiveIntentClassification (fr)
+      revision: 4672e20407010da34463acc759c162ca9734bca6
+      split: test
+      type: mteb/amazon_massive_intent
+    metrics:
+    - type: accuracy
+      value: 69.5359784801614
+    - type: f1
+      value: 64.640488940591
+    - type: f1_weighted
+      value: 67.85916565361048
+    - type: main_score
+      value: 69.5359784801614
+    task:
+      type: Classification
+  - dataset:
+      config: fr
+      name: MTEB MassiveScenarioClassification (fr)
+      revision: fad2c6e8459f9e1c45d9315f4953d921437d70f8
+      split: test
+      type: mteb/amazon_massive_scenario
+    metrics:
+    - type: accuracy
+      value: 78.52723604572965
+    - type: f1
+      value: 77.1995224144067
+    - type: f1_weighted
+      value: 78.1215987283123
+    - type: main_score
+      value: 78.52723604572965
+    task:
+      type: Classification
   - dataset:
       config: fr
       name: MTEB MintakaRetrieval (fr)
@@ -12942,6 +13176,184 @@ model-index:
       value: 32.064
     task:
       type: Retrieval
+  - dataset:
+      config: fr
+      name: MTEB OpusparcusPC (fr)
+      revision: 9e9b1f8ef51616073f47f306f7f47dd91663f86a
+      split: test
+      type: GEM/opusparcus
+    metrics:
+    - type: cosine_accuracy
+      value: 82.62942779291554
+    - type: cosine_accuracy_threshold
+      value: 83.4860622882843
+    - type: cosine_ap
+      value: 93.39616519364185
+    - type: cosine_f1
+      value: 88.03378695448146
+    - type: cosine_f1_threshold
+      value: 83.4860622882843
+    - type: cosine_precision
+      value: 83.45195729537367
+    - type: cosine_recall
+      value: 93.14796425024826
+    - type: dot_accuracy
+      value: 82.62942779291554
+    - type: dot_accuracy_threshold
+      value: 83.4860622882843
+    - type: dot_ap
+      value: 93.39616519364185
+    - type: dot_f1
+      value: 88.03378695448146
+    - type: dot_f1_threshold
+      value: 83.4860622882843
+    - type: dot_precision
+      value: 83.45195729537367
+    - type: dot_recall
+      value: 93.14796425024826
+    - type: euclidean_accuracy
+      value: 82.62942779291554
+    - type: euclidean_accuracy_threshold
+      value: 57.4698805809021
+    - type: euclidean_ap
+      value: 93.39616519364185
+    - type: euclidean_f1
+      value: 88.03378695448146
+    - type: euclidean_f1_threshold
+      value: 57.4698805809021
+    - type: euclidean_precision
+      value: 83.45195729537367
+    - type: euclidean_recall
+      value: 93.14796425024826
+    - type: main_score
+      value: 93.39616519364185
+    - type: manhattan_accuracy
+      value: 82.62942779291554
+    - type: manhattan_accuracy_threshold
+      value: 1306.7530632019043
+    - type: manhattan_ap
+      value: 93.34098710518775
+    - type: manhattan_f1
+      value: 87.78409090909089
+    - type: manhattan_f1_threshold
+      value: 1335.2685928344727
+    - type: manhattan_precision
+      value: 83.89140271493213
+    - type: manhattan_recall
+      value: 92.05561072492551
+    - type: max_ap
+      value: 93.39616519364185
+    - type: max_f1
+      value: 88.03378695448146
+    - type: max_precision
+      value: 83.89140271493213
+    - type: max_recall
+      value: 93.14796425024826
+    - type: similarity_accuracy
+      value: 82.62942779291554
+    - type: similarity_accuracy_threshold
+      value: 83.4860622882843
+    - type: similarity_ap
+      value: 93.39616519364185
+    - type: similarity_f1
+      value: 88.03378695448146
+    - type: similarity_f1_threshold
+      value: 83.4860622882843
+    - type: similarity_precision
+      value: 83.45195729537367
+    - type: similarity_recall
+      value: 93.14796425024826
+    task:
+      type: PairClassification
+  - dataset:
+      config: fr
+      name: MTEB PawsXPairClassification (fr)
+      revision: 8a04d940a42cd40658986fdd8e3da561533a3646
+      split: test
+      type: google-research-datasets/paws-x
+    metrics:
+    - type: cosine_accuracy
+      value: 60.8
+    - type: cosine_accuracy_threshold
+      value: 98.90193939208984
+    - type: cosine_ap
+      value: 60.50913122978733
+    - type: cosine_f1
+      value: 62.69411339833874
+    - type: cosine_f1_threshold
+      value: 95.17210125923157
+    - type: cosine_precision
+      value: 46.51661307609861
+    - type: cosine_recall
+      value: 96.12403100775194
+    - type: dot_accuracy
+      value: 60.8
+    - type: dot_accuracy_threshold
+      value: 98.9019513130188
+    - type: dot_ap
+      value: 60.49770725998639
+    - type: dot_f1
+      value: 62.69411339833874
+    - type: dot_f1_threshold
+      value: 95.17210721969604
+    - type: dot_precision
+      value: 46.51661307609861
+    - type: dot_recall
+      value: 96.12403100775194
+    - type: euclidean_accuracy
+      value: 60.8
+    - type: euclidean_accuracy_threshold
+      value: 14.819307625293732
+    - type: euclidean_ap
+      value: 60.50917425308617
+    - type: euclidean_f1
+      value: 62.69411339833874
+    - type: euclidean_f1_threshold
+      value: 31.07377290725708
+    - type: euclidean_precision
+      value: 46.51661307609861
+    - type: euclidean_recall
+      value: 96.12403100775194
+    - type: main_score
+      value: 60.73371250119265
+    - type: manhattan_accuracy
+      value: 60.9
+    - type: manhattan_accuracy_threshold
+      value: 354.8734188079834
+    - type: manhattan_ap
+      value: 60.73371250119265
+    - type: manhattan_f1
+      value: 62.70506744440393
+    - type: manhattan_f1_threshold
+      value: 711.578369140625
+    - type: manhattan_precision
+      value: 46.73913043478261
+    - type: manhattan_recall
+      value: 95.23809523809523
+    - type: max_ap
+      value: 60.73371250119265
+    - type: max_f1
+      value: 62.70506744440393
+    - type: max_precision
+      value: 46.73913043478261
+    - type: max_recall
+      value: 96.12403100775194
+    - type: similarity_accuracy
+      value: 60.8
+    - type: similarity_accuracy_threshold
+      value: 98.90193939208984
+    - type: similarity_ap
+      value: 60.50913122978733
+    - type: similarity_f1
+      value: 62.69411339833874
+    - type: similarity_f1_threshold
+      value: 95.17210125923157
+    - type: similarity_precision
+      value: 46.51661307609861
+    - type: similarity_recall
+      value: 96.12403100775194
+    task:
+      type: PairClassification
   - dataset:
       config: default
       name: MTEB SICKFr
@@ -12969,6 +13381,164 @@ model-index:
       value: 77.47140335069184
     task:
       type: STS
+  - dataset:
+      config: fr
+      name: MTEB STS22 (fr)
+      revision: de9d86b3b84231dc21f76c7b7af1f28e2f57f6e3
+      split: test
+      type: mteb/sts22-crosslingual-sts
+    metrics:
+    - type: cosine_pearson
+      value: 77.1356210910051
+    - type: cosine_spearman
+      value: 81.7065039306575
+    - type: euclidean_pearson
+      value: 79.32575551712296
+    - type: euclidean_spearman
+      value: 81.75624482168821
+    - type: main_score
+      value: 81.7065039306575
+    - type: manhattan_pearson
+      value: 81.05436417153798
+    - type: manhattan_spearman
+      value: 82.13370902176736
+    - type: pearson
+      value: 77.1356210910051
+    - type: spearman
+      value: 81.7065039306575
+    task:
+      type: STS
+  - dataset:
+      config: de-fr
+      name: MTEB STS22 (de-fr)
+      revision: de9d86b3b84231dc21f76c7b7af1f28e2f57f6e3
+      split: test
+      type: mteb/sts22-crosslingual-sts
+    metrics:
+    - type: cosine_pearson
+      value: 61.40659325490285
+    - type: cosine_spearman
+      value: 64.21007088135842
+    - type: euclidean_pearson
+      value: 61.051174476106
+    - type: euclidean_spearman
+      value: 64.21007088135842
+    - type: main_score
+      value: 64.21007088135842
+    - type: manhattan_pearson
+      value: 60.225817072214525
+    - type: manhattan_spearman
+      value: 64.32288638294209
+    - type: pearson
+      value: 61.40659325490285
+    - type: spearman
+      value: 64.21007088135842
+    task:
+      type: STS
+  - dataset:
+      config: fr-pl
+      name: MTEB STS22 (fr-pl)
+      revision: de9d86b3b84231dc21f76c7b7af1f28e2f57f6e3
+      split: test
+      type: mteb/sts22-crosslingual-sts
+    metrics:
+    - type: cosine_pearson
+      value: 88.17138238483673
+    - type: cosine_spearman
+      value: 84.51542547285167
+    - type: euclidean_pearson
+      value: 87.99782696047525
+    - type: euclidean_spearman
+      value: 84.51542547285167
+    - type: main_score
+      value: 84.51542547285167
+    - type: manhattan_pearson
+      value: 85.811937669563
+    - type: manhattan_spearman
+      value: 84.51542547285167
+    - type: pearson
+      value: 88.17138238483673
+    - type: spearman
+      value: 84.51542547285167
+    task:
+      type: STS
+  - dataset:
+      config: fr
+      name: MTEB STSBenchmarkMultilingualSTS (fr)
+      revision: 29afa2569dcedaaa2fe6a3dcfebab33d28b82e8c
+      split: test
+      type: mteb/stsb_multi_mt
+    metrics:
+    - type: cosine_pearson
+      value: 79.98375089796882
+    - type: cosine_spearman
+      value: 81.06570417849169
+    - type: euclidean_pearson
+      value: 79.44759787417051
+    - type: euclidean_spearman
+      value: 81.06430479357311
+    - type: main_score
+      value: 81.06570417849169
+    - type: manhattan_pearson
+      value: 79.34683573713086
+    - type: manhattan_spearman
+      value: 81.00584846124926
+    - type: pearson
+      value: 79.98375089796882
+    - type: spearman
+      value: 81.06570417849169
+    task:
+      type: STS
+  - dataset:
+      config: default
+      name: MTEB SummEvalFr
+      revision: b385812de6a9577b6f4d0f88c6a6e35395a94054
+      split: test
+      type: lyon-nlp/summarization-summeval-fr-p2p
+    metrics:
+    - type: cosine_pearson
+      value: 31.198220154029464
+    - type: cosine_spearman
+      value: 30.886000528607877
+    - type: dot_pearson
+      value: 31.19822718500702
+    - type: dot_spearman
+      value: 30.86590068433314
+    - type: main_score
+      value: 30.886000528607877
+    - type: pearson
+      value: 31.198220154029464
+    - type: spearman
+      value: 30.886000528607877
+    task:
+      type: Summarization
+  - dataset:
+      config: default
+      name: MTEB SyntecReranking
+      revision: daf0863838cd9e3ba50544cdce3ac2b338a1b0ad
+      split: test
+      type: lyon-nlp/mteb-fr-reranking-syntec-s2p
+    metrics:
+    - type: main_score
+      value: 86.6
+    - type: map
+      value: 86.6
+    - type: mrr
+      value: 86.6
+    - type: nAUC_map_diff1
+      value: 59.66160008216082
+    - type: nAUC_map_max
+      value: 19.768885092568734
+    - type: nAUC_map_std
+      value: 44.66975354255961
+    - type: nAUC_mrr_diff1
+      value: 59.66160008216082
+    - type: nAUC_mrr_max
+      value: 19.768885092568734
+    - type: nAUC_mrr_std
+      value: 44.66975354255961
+    task:
+      type: Reranking
   - dataset:
       config: default
       name: MTEB SyntecRetrieval