clean table #1
by barthfab · opened

README.md CHANGED
@@ -93,30 +93,42 @@ Currently, we are working on more suitable benchmarks for Spanish, French, German
 <details>
 <summary>Evaluation results</summary>
 
-
+### All 5 Languages
+
+| | avg | arc_challenge | belebele | hellaswag | mmlu | truthfulqa |
+|:---------------------------|---------:|----------------:|-----------:|------------:|---------:|-------------:|
+| Occiglot-7b-eu5 | 0.516895 | 0.508109 | 0.675556 | 0.718963 | 0.402064 | 0.279782 |
+| Occiglot-7b-eu5-instruct | 0.537799 | 0.53632 | 0.691111 | 0.731918 | 0.405198 | 0.32445 |
+| Occiglot-7b-fr-en | 0.509209 | 0.496806 | 0.691333 | 0.667475 | 0.409129 | 0.281303 |
+| Occiglot-7b-fr-en-instruct | 0.52884 | 0.515613 | 0.723333 | 0.67371 | 0.413024 | 0.318521 |
+| Claire-mistral-7b-0.1 | 0.514226 | 0.502773 | 0.705111 | 0.666871 | 0.412128 | 0.284245 |
+| Mistral-7b-v0.1 | 0.547111 | 0.528937 | 0.768444 | 0.682516 | 0.448253 | 0.307403 |
+| Mistral-7b-instruct-v0.2 | 0.56713 | 0.547228 | 0.741111 | 0.69455 | 0.422501 | 0.430262 |
+
+
 ### English
 
-
-
-
-
-
-
-
-
-
+| | avg | arc_challenge | belebele | hellaswag | mmlu | truthfulqa |
+|:---------------------------|---------:|----------------:|-----------:|------------:|---------:|-------------:|
+| Occiglot-7b-eu5 | 0.59657 | 0.530717 | 0.726667 | 0.789882 | 0.531904 | 0.403678 |
+| Occiglot-7b-eu5-instruct | 0.617905 | 0.558874 | 0.746667 | 0.799841 | 0.535109 | 0.449 |
+| Occiglot-7b-fr-en | 0.621947 | 0.568259 | 0.771111 | 0.804919 | 0.570716 | 0.394726 |
+| Occiglot-7b-fr-en-instruct | 0.646571 | 0.586177 | 0.794444 | 0.808305 | 0.569862 | 0.474064 |
+| Claire-mistral-7b-0.1 | 0.651798 | 0.59727 | 0.817778 | 0.827126 | 0.600912 | 0.415906 |
+| Mistral-7b-v0.1 | 0.668385 | 0.612628 | 0.844444 | 0.834097 | 0.624555 | 0.426201 |
+| Mistral-7b-instruct-v0.2 | 0.713657 | 0.637372 | 0.824444 | 0.846345 | 0.59201 | 0.668116 |
 
 ### French
 
-
-
-
-
-
-
-
-
-
+| | avg | arc_challenge_fr | belebele_fr | hellaswag_fr | mmlu_fr | truthfulqa_fr |
+|:---------------------------|---------:|-------------------:|--------------:|---------------:|----------:|----------------:|
+| Occiglot-7b-eu5 | 0.525017 | 0.506416 | 0.675556 | 0.712358 | 0.495684 | 0.23507 |
+| Occiglot-7b-eu5-instruct | 0.554216 | 0.541488 | 0.7 | 0.724245 | 0.499122 | 0.306226 |
+| Occiglot-7b-fr-en | 0.542903 | 0.532934 | 0.706667 | 0.718891 | 0.51333 | 0.242694 |
+| Occiglot-7b-fr-en-instruct | 0.567079 | 0.542344 | 0.752222 | 0.72553 | 0.52051 | 0.29479 |
+| Claire-mistral-7b-0.1 | 0.515127 | 0.486741 | 0.694444 | 0.642964 | 0.479566 | 0.271919 |
+| Mistral-7b-v0.1 | 0.558129 | 0.525235 | 0.776667 | 0.66481 | 0.543121 | 0.280813 |
+| Mistral-7b-instruct-v0.2 | 0.575821 | 0.551754 | 0.758889 | 0.67916 | 0.506837 | 0.382465 |
 
 </details>
 
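For reference, each `avg` column is the unweighted mean of the five benchmark scores in its row; e.g. for Occiglot-7b-eu5 across all 5 languages, (0.508109 + 0.675556 + 0.718963 + 0.402064 + 0.279782) / 5 ≈ 0.516895. The cleaned tables also match the pipe-table style that pandas' `DataFrame.to_markdown()` emits, so a minimal sketch of regenerating one follows. This is an assumption about tooling, not part of the PR; it needs pandas plus the `tabulate` package, and the two score rows are copied from the "All 5 Languages" table above.

```python
# Hypothetical sketch (not part of this PR): rebuilding a cleaned table in the
# same pipe-table style with pandas. DataFrame.to_markdown() requires the
# `tabulate` package.
import pandas as pd

benchmarks = ["arc_challenge", "belebele", "hellaswag", "mmlu", "truthfulqa"]
scores = {
    # Rows copied from the "All 5 Languages" table above.
    "Occiglot-7b-eu5": [0.508109, 0.675556, 0.718963, 0.402064, 0.279782],
    "Mistral-7b-v0.1": [0.528937, 0.768444, 0.682516, 0.448253, 0.307403],
}

df = pd.DataFrame.from_dict(scores, orient="index", columns=benchmarks)
# `avg` is the plain mean over the five benchmarks, matching the README:
# (0.508109 + 0.675556 + 0.718963 + 0.402064 + 0.279782) / 5 = 0.516895.
df.insert(0, "avg", df.mean(axis=1))
print(df.to_markdown())  # emits a markdown pipe table like the ones above
```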