g8a9 committed on
Commit
45517cb
β€’
1 Parent(s): 61e7dfb
Files changed (2) hide show
  1. app.py +85 -83
  2. latest_results.tsv +18 -18
app.py CHANGED
@@ -154,59 +154,61 @@ with demo:
154
 
155
  with gr.Tabs(elem_classes="tab-buttons") as tabs:
156
  with gr.TabItem("πŸ… LLM Benchmark", elem_id="llm-benchmark-tab-table", id=0):
157
- with gr.Row():
158
- # with gr.Column():
159
- # with gr.Row():
160
- search_bar = gr.Textbox(
161
- placeholder=" πŸ” Search for your model (separate multiple queries with `;`) and press ENTER...",
162
- show_label=False,
163
- elem_id="search-bar",
164
- )
165
-
166
- # with gr.Row():
167
- # shown_columns = gr.CheckboxGroup(
168
- # choices=[c.name for c in fields(AutoEvalColumn) if not c.hidden and not c.never_hidden],
169
- # value=[
170
- # c.name
171
- # for c in fields(AutoEvalColumn)
172
- # if c.displayed_by_default and not c.hidden and not c.never_hidden
173
- # ],
174
- # label="Select columns to show",
175
- # elem_id="column-select",
176
- # interactive=True,
177
- # )
178
- # with gr.Row():
179
- # deleted_models_visibility = gr.Checkbox(
180
- # value=False, label="Show gated/private/deleted models", interactive=True
181
- # )
182
- # with gr.Column(min_width=320):
183
- # with gr.Box(elem_id="box-filter"):
184
- filter_columns_type = gr.CheckboxGroup(
185
- label="Model types",
186
- choices=[t.to_str() for t in ModelType],
187
- value=[t.to_str() for t in ModelType],
188
- interactive=True,
189
- elem_id="filter-columns-type",
190
- )
191
- # filter_columns_precision = gr.CheckboxGroup(
192
- # label="Precision",
193
- # choices=[i.value.name for i in Precision],
194
- # value=[i.value.name for i in Precision],
195
- # interactive=True,
196
- # elem_id="filter-columns-precision",
197
- # )
198
- filter_columns_size = gr.CheckboxGroup(
199
- label="Model sizes (in billions of parameters)",
200
- choices=list(NUMERIC_INTERVALS.keys()),
201
- value=list(NUMERIC_INTERVALS.keys()),
202
- interactive=True,
203
- elem_id="filter-columns-size",
204
- )
205
 
206
  leaderboard_table = gr.components.Dataframe(
207
  value=leaderboard_df[
208
  [c.name for c in fields(AutoEvalColumn) if c.never_hidden or c.displayed_by_default]
209
- ], # ,# ] + shown_columns],
 
 
210
  headers=[
211
  c.name for c in fields(AutoEvalColumn) if c.never_hidden or c.displayed_by_default
212
  ], ##, if c.never_hidden] + shown_columns,
@@ -223,40 +225,40 @@ with demo:
223
  datatype=TYPES,
224
  visible=False,
225
  )
226
- search_bar.submit(
227
- update_table,
228
- [
229
- hidden_leaderboard_table_for_search,
230
- # None,
231
- filter_columns_type,
232
- # filter_columns_precision,
233
- filter_columns_size,
234
- # None,
235
- search_bar,
236
- ],
237
- leaderboard_table,
238
- )
239
- for selector in [
240
- # shown_columns,
241
- filter_columns_type,
242
- # filter_columns_precision,
243
- filter_columns_size,
244
- # deleted_models_visibility,
245
- ]:
246
- selector.change(
247
- update_table,
248
- [
249
- hidden_leaderboard_table_for_search,
250
- # None,
251
- filter_columns_type,
252
- # filter_columns_precision,
253
- filter_columns_size,
254
- # None,
255
- search_bar,
256
- ],
257
- leaderboard_table,
258
- queue=True,
259
- )
260
 
261
  with gr.TabItem("πŸ“ About", elem_id="llm-benchmark-tab-table", id=2):
262
  gr.Markdown(LLM_BENCHMARKS_TEXT, elem_classes="markdown-text")
 
154
 
155
  with gr.Tabs(elem_classes="tab-buttons") as tabs:
156
  with gr.TabItem("πŸ… LLM Benchmark", elem_id="llm-benchmark-tab-table", id=0):
157
+ # with gr.Row():
158
+ # # with gr.Column():
159
+ # # with gr.Row():
160
+ # search_bar = gr.Textbox(
161
+ # placeholder=" πŸ” Search for your model (separate multiple queries with `;`) and press ENTER...",
162
+ # show_label=False,
163
+ # elem_id="search-bar",
164
+ # )
165
+
166
+ # # with gr.Row():
167
+ # # shown_columns = gr.CheckboxGroup(
168
+ # # choices=[c.name for c in fields(AutoEvalColumn) if not c.hidden and not c.never_hidden],
169
+ # # value=[
170
+ # # c.name
171
+ # # for c in fields(AutoEvalColumn)
172
+ # # if c.displayed_by_default and not c.hidden and not c.never_hidden
173
+ # # ],
174
+ # # label="Select columns to show",
175
+ # # elem_id="column-select",
176
+ # # interactive=True,
177
+ # # )
178
+ # # with gr.Row():
179
+ # # deleted_models_visibility = gr.Checkbox(
180
+ # # value=False, label="Show gated/private/deleted models", interactive=True
181
+ # # )
182
+ # # with gr.Column(min_width=320):
183
+ # # with gr.Box(elem_id="box-filter"):
184
+ # filter_columns_type = gr.CheckboxGroup(
185
+ # label="Model types",
186
+ # choices=[t.to_str() for t in ModelType],
187
+ # value=[t.to_str() for t in ModelType],
188
+ # interactive=True,
189
+ # elem_id="filter-columns-type",
190
+ # )
191
+ # # filter_columns_precision = gr.CheckboxGroup(
192
+ # # label="Precision",
193
+ # # choices=[i.value.name for i in Precision],
194
+ # # value=[i.value.name for i in Precision],
195
+ # # interactive=True,
196
+ # # elem_id="filter-columns-precision",
197
+ # # )
198
+ # filter_columns_size = gr.CheckboxGroup(
199
+ # label="Model sizes (in billions of parameters)",
200
+ # choices=list(NUMERIC_INTERVALS.keys()),
201
+ # value=list(NUMERIC_INTERVALS.keys()),
202
+ # interactive=True,
203
+ # elem_id="filter-columns-size",
204
+ # )
205
 
206
  leaderboard_table = gr.components.Dataframe(
207
  value=leaderboard_df[
208
  [c.name for c in fields(AutoEvalColumn) if c.never_hidden or c.displayed_by_default]
209
+ ].applymap(
210
+ lambda x: x if isinstance(x, str) or isinstance(x, float) else round(x["value"], 2)
211
+ ), # ,# ] + shown_columns],
212
  headers=[
213
  c.name for c in fields(AutoEvalColumn) if c.never_hidden or c.displayed_by_default
214
  ], ##, if c.never_hidden] + shown_columns,
 
225
  datatype=TYPES,
226
  visible=False,
227
  )
228
+ # search_bar.submit(
229
+ # update_table,
230
+ # [
231
+ # hidden_leaderboard_table_for_search,
232
+ # # None,
233
+ # filter_columns_type,
234
+ # # filter_columns_precision,
235
+ # filter_columns_size,
236
+ # # None,
237
+ # search_bar,
238
+ # ],
239
+ # leaderboard_table,
240
+ # )
241
+ # for selector in [
242
+ # # shown_columns,
243
+ # filter_columns_type,
244
+ # # filter_columns_precision,
245
+ # filter_columns_size,
246
+ # # deleted_models_visibility,
247
+ # ]:
248
+ # selector.change(
249
+ # update_table,
250
+ # [
251
+ # hidden_leaderboard_table_for_search,
252
+ # # None,
253
+ # filter_columns_type,
254
+ # # filter_columns_precision,
255
+ # filter_columns_size,
256
+ # # None,
257
+ # search_bar,
258
+ # ],
259
+ # leaderboard_table,
260
+ # queue=True,
261
+ # )
262
 
263
  with gr.TabItem("πŸ“ About", elem_id="llm-benchmark-tab-table", id=2):
264
  gr.Markdown(LLM_BENCHMARKS_TEXT, elem_classes="markdown-text")
latest_results.tsv CHANGED
@@ -1,18 +1,18 @@
1
- eval_name Precision Type T Weight type Architecture Model Average ⬆️ Hub License #Params (B) Model sha Hub ❀️ Available on the hub Code Data AMI 2020 Agg AMI 2020 Miso ARC-C Belebele GeNTE Neutralizing HaSpeeDe2 HS HaSpeeDe2 Stereo HateCheck HONEST IronITA Irony IronITA Sarcasm ItaCoLA News Sum SENTIPOLC SQuAD it TruthfulQA XCOPA
2
- 6 meta-llama_Meta-Llama-3-8B-Instruct_bfloat16 bfloat16 fine-tuned πŸ”Ά Delta LlamaForCausalLM "<a target=""_blank"" href=""https://huggingface.co/meta-llama/Meta-Llama-3-8B-Instruct"" style=""color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;"">meta-llama/Meta-Llama-3-8B-Instruct</a>" 58.748316329193344 Meta Llama 3 Community License 8.0 0 True πŸ™ˆ πŸ™ˆ 55.37407439022941 71.59839304531086 42.57679180887372 82.0 32.48322147651007 70.53457622533335 63.09031737569537 81.04353954390334 100.0 68.90825671526659 50.63388859343638 0.2575796842123843 35.87793977181792 44.40535171743039 76.4493013414765 51.688145906790595 71.8
3
- 5 mistralai_Mistral-7B-Instruct-v0.2_bfloat16 bfloat16 fine-tuned πŸ”Ά Delta MistralForCausalLM "<a target=""_blank"" href=""https://huggingface.co/mistralai/Mistral-7B-Instruct-v0.2"" style=""color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;"">mistralai/Mistral-7B-Instruct-v0.2</a>" 57.34121935588332 Apache 2.0 7.0 0 True πŸ™ˆ πŸ™ˆ 59.26344649551212 67.03848859411114 44.36860068259386 67.55555555555556 29.12751677852349 70.94842426874283 66.92711073442074 77.91591984780963 100.0 60.340552982611825 52.5864148320762 0.2650337064892725 36.39365330456299 50.86004322897759 67.76589485305061 59.24407318497844 64.2
4
- 7 meta-llama_Meta-Llama-3-8B_bfloat16 bfloat16 pretrained 🟒 Original LlamaForCausalLM "<a target=""_blank"" href=""https://huggingface.co/meta-llama/Meta-Llama-3-8B"" style=""color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;"">meta-llama/Meta-Llama-3-8B</a>" 56.06703915537942 Meta Llama 3 Community License 8.0 0 True πŸ™ˆ πŸ™ˆ 59.167006941608825 65.2988113338495 40.44368600682594 75.88888888888889 29.664429530201343 66.34318803515383 59.665954331496216 80.46901075930542 100.0 55.417040602648825 56.72119925007975 0.27369249994767686 32.8415569535643 41.65027333775969 76.0261495015472 42.068777668572736 71.2
5
- 11 mii-community_zefiro-7b-dpo-ITA_bfloat16 bfloat16 fine-tuned πŸ”Ά Adapter MistralForCausalLM "<a target=""_blank"" href=""https://huggingface.co/mii-community/zefiro-7b-dpo-ITA"" style=""color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;"">mii-community/zefiro-7b-dpo-ITA</a>" 55.965402990833326 Apache 2.0 7.0 0 True πŸ™ˆ πŸ‘ 58.82330921555731 65.29219074291716 44.19795221843004 66.11111111111111 29.395973154362416 66.42034413085725 62.04374417717792 82.92405607588724 100.0 59.58686440677966 54.61088096497907 0.15622781170005148 35.73603929970904 40.115316478607326 74.25556784297711 43.342273213113806 68.4
6
- 12 mii-community_zefiro-7b-sft-ITA_bfloat16 bfloat16 fine-tuned πŸ”Ά Adapter MistralForCausalLM "<a target=""_blank"" href=""https://huggingface.co/mii-community/zefiro-7b-sft-ITA"" style=""color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;"">mii-community/zefiro-7b-sft-ITA</a>" 55.49594502634554 Apache 2.0 7.0 0 True πŸ™ˆ πŸ‘ 59.05979939301703 65.1057440915327 42.491467576791806 68.11111111111111 26.845637583892618 66.26712374430319 62.8192509112937 82.66496052951742 100.0 52.30611640858258 51.83751520046043 0.1357069141230042 34.79253286178762 46.95941666858784 74.51464966490876 42.52003278796419 67.0
7
- 10 mii-community_zefiro-7b-base-ITA_bfloat16 bfloat16 fine-tuned πŸ”Ά Delta MistralForCausalLM "<a target=""_blank"" href=""https://huggingface.co/mii-community/zefiro-7b-base-ITA"" style=""color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;"">mii-community/zefiro-7b-base-ITA</a>" 55.00699465875708 Apache 2.0 7.0 0 True πŸ™ˆ πŸ‘ 58.26528760660498 64.28826512391971 41.04095563139932 58.77777777777777 27.651006711409398 63.41040091554036 60.20187319698322 83.36773972540995 100.0 59.61581980369971 57.22956187895212 0.21630746589700614 34.14146108746794 38.60348969137316 75.51969438076942 46.18926820166605 66.60000000000001
8
- 4 mistralai_Mistral-7B-v0.1_bfloat16 bfloat16 pretrained 🟒 Original MistralForCausalLM "<a target=""_blank"" href=""https://huggingface.co/mistralai/Mistral-7B-v0.1"" style=""color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;"">mistralai/Mistral-7B-v0.1</a>" 54.53886941414389 Apache 2.0 7.0 0 True πŸ™ˆ πŸ™ˆ 57.328824884373255 65.894796072133 41.12627986348123 65.55555555555556 29.395973154362416 60.74292449685459 58.40138983607699 81.20893551611952 100.0 55.21599398531273 56.0842910054169 0.21650562273812077 33.95578203972551 38.248077168561004 74.9929389324236 43.192511907311555 65.60000000000001
9
- 13 swap-uniba_LLaMAntino-2-chat-13b-hf-ITA_bfloat16 bfloat16 fine-tuned πŸ”Ά Adapter LlamaForCausalLM "<a target=""_blank"" href=""https://huggingface.co/swap-uniba/LLaMAntino-2-chat-13b-hf-ITA"" style=""color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;"">swap-uniba/LLaMAntino-2-chat-13b-hf-ITA</a>" 54.11970329791716 Llama 2 Community License 13.0 0 True πŸ™ˆ πŸ‘ 61.10842468417231 65.37114603439397 39.16382252559727 60.22222222222222 25.369127516778523 69.19701593869706 58.47240303675274 81.91924285348375 100.0 60.50989600805099 52.82407691311843 0.14705407414144434 23.961182038838874 33.936431374370564 72.99623578596571 44.43667505800782 70.39999999999999
10
- 9 meta-llama_Llama-2-13b-hf_bfloat16 bfloat16 pretrained 🟒 Original LlamaForCausalLM "<a target=""_blank"" href=""https://huggingface.co/meta-llama/Llama-2-13b-hf"" style=""color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;"">meta-llama/Llama-2-13b-hf</a>" 51.53259991565551 Llama 2 Community License 13.0 0 True πŸ™ˆ πŸ™ˆ 55.5211353099392 59.73745072519405 39.67576791808874 49.77777777777778 24.295302013422816 56.705263521819575 55.58451703385505 75.35374357525852 100.0 49.6392951529161 51.32659342493766 0.15611794645515564 34.99992804182015 35.591589638147205 75.37248669035945 42.917229796152284 69.39999999999999
11
- 15 swap-uniba_LLaMAntino-2-13b-hf-ITA_bfloat16 bfloat16 fine-tuned πŸ”Ά Adapter LlamaForCausalLM "<a target=""_blank"" href=""https://huggingface.co/swap-uniba/LLaMAntino-2-13b-hf-ITA"" style=""color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;"">swap-uniba/LLaMAntino-2-13b-hf-ITA</a>" 51.398892791161465 Llama 2 Community License 13.0 0 True πŸ™ˆ πŸ‘ 56.91493042765838 60.79803569083185 38.395904436860064 52.22222222222223 24.563758389261743 59.591680814940574 53.72166074176572 68.63908831908832 100.0 53.8835564536499 55.220925077582386 0.24355772539252643 23.46778181911886 37.868993755237724 74.32140387879224 42.12767769734223 71.8
12
- 3 g8a9_tweety-mistral-7b_bfloat16 bfloat16 fine-tuned πŸ”Ά Delta MistralForCausalLM "<a target=""_blank"" href=""https://huggingface.co/g8a9/tweety-mistral-7b"" style=""color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;"">g8a9/tweety-mistral-7b</a>" 48.30841019247476 Apache 2.0 7.0 0 True πŸ‘ πŸ‘ 51.45449792748049 56.83712780075503 38.310580204778155 49.77777777777778 26.308724832214764 56.756734367216744 54.259763500716296 64.359704127708 100.0 48.96104026840812 49.87333014539054 0.12625704978630167 18.72596344839197 30.051768572855263 64.28422203983018 37.75548120876116 73.4
13
- 8 meta-llama_Llama-2-7b-hf_bfloat16 bfloat16 pretrained 🟒 Original LlamaForCausalLM "<a target=""_blank"" href=""https://huggingface.co/meta-llama/Llama-2-7b-hf"" style=""color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;"">meta-llama/Llama-2-7b-hf</a>" 47.88766168947672 Llama 2 Community License 7.0 0 True πŸ™ˆ πŸ™ˆ 50.167656275074535 58.36785332162082 34.8976109215017 36.0 24.832214765100673 51.08771929824562 54.388067109409945 68.27095354111434 100.0 47.98695094164673 52.28499188648629 0.1232306318769991 33.83386905556545 28.13533353128773 68.54722302033736 39.16657442183617 66.0
14
- 14 swap-uniba_LLaMAntino-2-7b-hf-ITA_bfloat16 bfloat16 fine-tuned πŸ”Ά Adapter LlamaForCausalLM "<a target=""_blank"" href=""https://huggingface.co/swap-uniba/LLaMAntino-2-7b-hf-ITA"" style=""color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;"">swap-uniba/LLaMAntino-2-7b-hf-ITA</a>" 45.77323088190624 Llama 2 Community License 7.0 0 True πŸ™ˆ πŸ‘ 50.55555555555556 53.96398030216369 33.532423208191126 35.0 24.295302013422816 45.45927084511112 48.916213374427244 63.034868799504395 100.0 49.374306621370714 47.508286764686886 0.12030506441959186 24.681836570629475 24.100219820665426 69.11938518934485 40.482970862913184 68.0
15
- 0 sapienzanlp_Minerva-3B-base-v1.0_bfloat16 bfloat16 pretrained 🟒 Original MistralForCausalLM "<a target=""_blank"" href=""https://huggingface.co/sapienzanlp/Minerva-3B-base-v1.0"" style=""color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;"">sapienzanlp/Minerva-3B-base-v1.0</a>" 41.83701792171881 Apache 2.0 3.0 0 True πŸ™ˆ πŸ‘ 49.23359098650596 52.79773093447906 30.97269624573379 24.333333333333336 23.221476510067113 48.934170047390545 45.62087699981554 48.50152328821496 100.0 45.47176216254846 46.937293275884066 -0.033345460872866474 22.064438703049753 23.965207913141235 43.23710703078177 37.371442699147025 68.60000000000001
16
- 16 swap-uniba_LLaMAntino-2-chat-7b-hf-ITA_bfloat16 bfloat16 fine-tuned πŸ”Ά Adapter LlamaForCausalLM "<a target=""_blank"" href=""https://huggingface.co/swap-uniba/LLaMAntino-2-chat-7b-hf-ITA"" style=""color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;"">swap-uniba/LLaMAntino-2-chat-7b-hf-ITA</a>" 39.41202334600491 Llama 2 Community License 7.0 0 True πŸ™ˆ πŸ‘ 46.2034115499462 45.34461746324341 29.266211604095567 28.111111111111107 23.758389261744966 42.88181951386289 42.392736217028414 46.58756852047553 100.0 41.699322128331325 45.99082660952828 0.006128977707520721 8.108838055814852 9.097213196911147 58.875305188040464 39.880897484241906 61.8
17
- 2 sapienzanlp_Minerva-1B-base-v1.0_bfloat16 bfloat16 pretrained 🟒 Original MistralForCausalLM "<a target=""_blank"" href=""https://huggingface.co/sapienzanlp/Minerva-1B-base-v1.0"" style=""color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;"">sapienzanlp/Minerva-1B-base-v1.0</a>" 38.91315019063004 Apache 2.0 1.0 0 True πŸ™ˆ πŸ‘ 48.12085869829324 54.850361197110416 24.573378839590443 22.666666666666664 26.44295302013423 49.56106111987823 46.22580429357212 49.08730795600027 100.0 45.20836949340911 47.013888888888886 0.040313621284920456 14.386315956732856 16.24451875278343 17.353822380105154 39.74793235626088 60.0
18
- 1 sapienzanlp_Minerva-350M-base-v1.0_bfloat16 bfloat16 pretrained 🟒 Original MistralForCausalLM "<a target=""_blank"" href=""https://huggingface.co/sapienzanlp/Minerva-350M-base-v1.0"" style=""color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;"">sapienzanlp/Minerva-350M-base-v1.0</a>" 37.29084548916654 Apache 2.0 0.35 0 True πŸ™ˆ πŸ‘ 45.17543859649123 37.91598801552352 24.40273037542662 22.88888888888889 53.8255033557047 42.03399318323408 40.00324919625145 46.79714365710485 100.0 38.049817139468225 44.255424938736375 -0.01382899490742639 10.341357559414417 22.94165519039672 4.978320972441255 43.74869124165633 56.599999999999994
 
1
+ eval_name Precision Type T Weight type Architecture Model Avg NLU Avg CFK Avg BFS Avg ⬆️ Hub License #Params (B) Model sha Hub ❀️ Available on the hub Code Data AMI 2020 Agg AMI 2020 Miso ARC-C Belebele GeNTE Neutralizing HaSpeeDe2 HS HaSpeeDe2 Stereo HateCheck HONEST IronITA Irony IronITA Sarcasm ItaCoLA News Sum SENTIPOLC SQuAD it TruthfulQA XCOPA Hellaswag-it
2
+ 6 swap-uniba_LLaMAntino-3-ANITA-8B-Inst-DPO-ITA_bfloat16 bfloat16 fine-tuned πŸ”Ά Adapter LlamaForCausalLM "<a target=""_blank"" href=""https://huggingface.co/swap-uniba/LLaMAntino-3-ANITA-8B-Inst-DPO-ITA"" style=""color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;"">swap-uniba/LLaMAntino-3-ANITA-8B-Inst-DPO-ITA</a>" 50.15912285053053 66.8637539701687 69.6423469054011 62.22174124203344 Llama 3 Community License Agreement 8.0 0 True πŸ‘ πŸ‘ {'value': 49.61934617107031, 'category': 'NLU'} {'value': 73.58604698054239, 'category': 'NLU'} {'value': 56.484641638225256, 'category': 'CFK'} {'value': 83.55555555555556, 'category': 'NLU'} {'value': 33.8255033557047, 'category': 'BFS'} {'value': 72.24399819126907, 'category': 'BFS'} {'value': 61.627116844508144, 'category': 'BFS'} {'value': 80.51511613552358, 'category': 'BFS'} {'value': 100.0, 'category': 'BFS'} {'value': 67.79529918401192, 'category': 'NLU'} {'value': 46.19514665929917, 'category': 'NLU'} {'value': 0.24261234404280246, 'category': 'NLU'} {'value': 33.783978293075634, 'category': 'NLU'} {'value': 46.49499761664646, 'category': 'NLU'} {'value': 71.27317142821833, 'category': 'CFK'} {'value': 68.09423700746308, 'category': 'CFK'} {'value': 73.4, 'category': 'CFK'} {'value': 65.06671977693686, 'category': 'CFK'}
3
+ 13 mistralai_Mistral-7B-Instruct-v0.2_bfloat16 bfloat16 fine-tuned πŸ”Ά Delta MistralForCausalLM "<a target=""_blank"" href=""https://huggingface.co/mistralai/Mistral-7B-Instruct-v0.2"" style=""color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;"">mistralai/Mistral-7B-Instruct-v0.2</a>" 49.62173851779433 58.69183546703023 69.176267976939 59.163280653921184 Apache 2.0 7.0 0 True πŸ™ˆ πŸ™ˆ {'value': 61.95096430524839, 'category': 'NLU'} {'value': 66.42194008585093, 'category': 'NLU'} {'value': 44.36860068259386, 'category': 'CFK'} {'value': 67.22222222222223, 'category': 'NLU'} {'value': 29.12751677852349, 'category': 'BFS'} {'value': 71.07491292799637, 'category': 'BFS'} {'value': 67.27017961567233, 'category': 'BFS'} {'value': 78.40873056250285, 'category': 'BFS'} {'value': 100.0, 'category': 'BFS'} {'value': 59.16469471738617, 'category': 'NLU'} {'value': 55.53851376330874, 'category': 'NLU'} {'value': 0.27708420363666786, 'category': 'NLU'} {'value': 36.377962201593874, 'category': 'NLU'} {'value': 50.02052664310759, 'category': 'NLU'} {'value': 68.04841543730598, 'category': 'CFK'} {'value': 59.24407318497844, 'category': 'CFK'} {'value': 64.2, 'category': 'CFK'} {'value': 57.598088030272855, 'category': 'CFK'}
4
+ 8 mii-community_zefiro-7b-dpo-ITA_bfloat16 bfloat16 fine-tuned πŸ”Ά Adapter MistralForCausalLM "<a target=""_blank"" href=""https://huggingface.co/mii-community/zefiro-7b-dpo-ITA"" style=""color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;"">mii-community/zefiro-7b-dpo-ITA</a>" 47.51869156825104 57.89491206679833 68.2055343559792 57.87304599700952 Apache 2.0 7.0 0 True πŸ™ˆ πŸ‘ {'value': 59.97920997920998, 'category': 'NLU'} {'value': 66.14027143881808, 'category': 'NLU'} {'value': 44.19795221843004, 'category': 'CFK'} {'value': 65.88888888888889, 'category': 'NLU'} {'value': 29.798657718120808, 'category': 'BFS'} {'value': 66.93068606112085, 'category': 'BFS'} {'value': 61.46209894750329, 'category': 'BFS'} {'value': 82.83622905315102, 'category': 'BFS'} {'value': 100.0, 'category': 'BFS'} {'value': 58.523449206965395, 'category': 'NLU'} {'value': 54.918191698733956, 'category': 'NLU'} {'value': 0.22337556862808253, 'category': 'NLU'} {'value': 35.66642647158017, 'category': 'NLU'} {'value': 38.80971929318383, 'category': 'NLU'} {'value': 74.34293876621986, 'category': 'CFK'} {'value': 43.34227321311386, 'category': 'CFK'} {'value': 68.4, 'category': 'CFK'} {'value': 59.191396136227844, 'category': 'CFK'}
5
+ 5 meta-llama_Meta-Llama-3-8B_bfloat16 bfloat16 pretrained 🟒 Original LlamaForCausalLM "<a target=""_blank"" href=""https://huggingface.co/meta-llama/Meta-Llama-3-8B"" style=""color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;"">meta-llama/Meta-Llama-3-8B</a>" 48.29856362856205 57.42199318457142 66.8371135489557 57.51922345402972 Meta Llama 3 Community License 8.0 0 True πŸ™ˆ πŸ™ˆ {'value': 60.02710027100271, 'category': 'NLU'} {'value': 63.14678395603251, 'category': 'NLU'} {'value': 40.529010238907844, 'category': 'CFK'} {'value': 76.0, 'category': 'NLU'} {'value': 29.53020134228188, 'category': 'BFS'} {'value': 65.30297764359561, 'category': 'BFS'} {'value': 59.541073390095356, 'category': 'BFS'} {'value': 79.81131536880565, 'category': 'BFS'} {'value': 100.0, 'category': 'BFS'} {'value': 57.31801541230962, 'category': 'NLU'} {'value': 56.750548188367965, 'category': 'NLU'} {'value': 0.2786244415689118, 'category': 'NLU'} {'value': 32.93607461627173, 'category': 'NLU'} {'value': 39.93136214294286, 'category': 'NLU'} {'value': 76.49082768675667, 'category': 'CFK'} {'value': 42.06877766857276, 'category': 'CFK'} {'value': 71.2, 'category': 'CFK'} {'value': 56.8213503286198, 'category': 'CFK'}
6
+ 15 mii-community_zefiro-7b-base-ITA_bfloat16 bfloat16 fine-tuned πŸ”Ά Delta MistralForCausalLM "<a target=""_blank"" href=""https://huggingface.co/mii-community/zefiro-7b-base-ITA"" style=""color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;"">mii-community/zefiro-7b-base-ITA</a>" 46.70847713397559 57.115645622716485 67.35862672334422 57.06091649334544 Apache 2.0 7.0 0 True πŸ™ˆ πŸ‘ {'value': 60.14362403797995, 'category': 'NLU'} {'value': 64.54082375784897, 'category': 'NLU'} {'value': 40.955631399317404, 'category': 'CFK'} {'value': 58.55555555555556, 'category': 'NLU'} {'value': 28.456375838926174, 'category': 'BFS'} {'value': 66.12858980217781, 'category': 'BFS'} {'value': 59.74063711314884, 'category': 'BFS'} {'value': 82.46753086246828, 'category': 'BFS'} {'value': 100.0, 'category': 'BFS'} {'value': 59.05311714498798, 'category': 'NLU'} {'value': 57.8863223808017, 'category': 'NLU'} {'value': 0.09963712635854956, 'category': 'NLU'} {'value': 34.19887652648641, 'category': 'NLU'} {'value': 39.18986054178559, 'category': 'NLU'} {'value': 75.6692177776856, 'category': 'CFK'} {'value': 46.18926820166605, 'category': 'CFK'} {'value': 66.60000000000001, 'category': 'CFK'} {'value': 56.164110734913365, 'category': 'CFK'}
7
+ 10 mii-community_zefiro-7b-sft-ITA_bfloat16 bfloat16 fine-tuned πŸ”Ά Adapter MistralForCausalLM "<a target=""_blank"" href=""https://huggingface.co/mii-community/zefiro-7b-sft-ITA"" style=""color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;"">mii-community/zefiro-7b-sft-ITA</a>" 46.84018585967878 56.40022559897527 67.23695288016117 56.825788112938405 Apache 2.0 7.0 0 True πŸ™ˆ πŸ‘ {'value': 60.458679319889285, 'category': 'NLU'} {'value': 63.51256529535591, 'category': 'NLU'} {'value': 42.32081911262799, 'category': 'CFK'} {'value': 67.77777777777779, 'category': 'NLU'} {'value': 27.248322147651006, 'category': 'BFS'} {'value': 65.72752014372092, 'category': 'BFS'} {'value': 60.158604473839915, 'category': 'BFS'} {'value': 83.05031763559394, 'category': 'BFS'} {'value': 100.0, 'category': 'BFS'} {'value': 52.69566548195397, 'category': 'NLU'} {'value': 51.630329924754, 'category': 'NLU'} {'value': 0.08940878967203518, 'category': 'NLU'} {'value': 34.80608014621687, 'category': 'NLU'} {'value': 43.75098014181036, 'category': 'NLU'} {'value': 74.55382319645513, 'category': 'CFK'} {'value': 42.52003278796414, 'category': 'CFK'} {'value': 67.0, 'category': 'CFK'} {'value': 55.606452897829115, 'category': 'CFK'}
8
+ 11 mistralai_Mistral-7B-v0.1_bfloat16 bfloat16 pretrained 🟒 Original MistralForCausalLM "<a target=""_blank"" href=""https://huggingface.co/mistralai/Mistral-7B-v0.1"" style=""color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;"">mistralai/Mistral-7B-v0.1</a>" 46.732352034614806 56.099282379017794 65.68372283507196 56.17178574956819 Apache 2.0 7.0 0 True πŸ™ˆ πŸ™ˆ {'value': 60.52050697114497, 'category': 'NLU'} {'value': 63.66158365032981, 'category': 'NLU'} {'value': 41.21160409556314, 'category': 'CFK'} {'value': 65.66666666666666, 'category': 'NLU'} {'value': 29.53020134228188, 'category': 'BFS'} {'value': 60.38816689466484, 'category': 'BFS'} {'value': 57.907599364752336, 'category': 'BFS'} {'value': 80.59264657366079, 'category': 'BFS'} {'value': 100.0, 'category': 'BFS'} {'value': 55.23299236027556, 'category': 'NLU'} {'value': 55.67900219124808, 'category': 'NLU'} {'value': 0.131895692851752, 'category': 'NLU'} {'value': 34.09475870496535, 'category': 'NLU'} {'value': 38.87141003943634, 'category': 'NLU'} {'value': 75.08500650762954, 'category': 'CFK'} {'value': 43.19251190731156, 'category': 'CFK'} {'value': 65.60000000000001, 'category': 'CFK'} {'value': 55.40728938458474, 'category': 'CFK'}
9
+ 9 swap-uniba_LLaMAntino-2-chat-13b-hf-ITA_bfloat16 bfloat16 fine-tuned πŸ”Ά Adapter LlamaForCausalLM "<a target=""_blank"" href=""https://huggingface.co/swap-uniba/LLaMAntino-2-chat-13b-hf-ITA"" style=""color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;"">swap-uniba/LLaMAntino-2-chat-13b-hf-ITA</a>" 44.43040251782813 56.03239812713 66.71944637887557 55.72741567461123 Llama 2 Community License 13.0 0 True πŸ™ˆ πŸ‘ {'value': 61.41230947327803, 'category': 'NLU'} {'value': 64.77739009492042, 'category': 'NLU'} {'value': 39.07849829351536, 'category': 'CFK'} {'value': 60.44444444444444, 'category': 'NLU'} {'value': 25.503355704697988, 'category': 'BFS'} {'value': 67.1548291501024, 'category': 'BFS'} {'value': 59.101414060364085, 'category': 'BFS'} {'value': 81.83763297921335, 'category': 'BFS'} {'value': 100.0, 'category': 'BFS'} {'value': 57.92048929663609, 'category': 'NLU'} {'value': 52.2777996043644, 'category': 'NLU'} {'value': 0.1015435288181161, 'category': 'NLU'} {'value': 23.81691473597593, 'category': 'NLU'} {'value': 34.69232896418751, 'category': 'NLU'} {'value': 73.10003377486571, 'category': 'CFK'} {'value': 44.43667505800782, 'category': 'CFK'} {'value': 70.39999999999999, 'category': 'CFK'} {'value': 53.146783509261105, 'category': 'CFK'}
10
+ 0 meta-llama_Llama-2-13b-hf_bfloat16 bfloat16 pretrained 🟒 Original LlamaForCausalLM "<a target=""_blank"" href=""https://huggingface.co/meta-llama/Llama-2-13b-hf"" style=""color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;"">meta-llama/Llama-2-13b-hf</a>" 41.85065195875397 56.39967689118707 61.493331674248495 53.24788684139651 Llama 2 Community License 13.0 0 True πŸ™ˆ πŸ™ˆ {'value': 53.24565637065637, 'category': 'NLU'} {'value': 59.32319654843206, 'category': 'NLU'} {'value': 39.93174061433447, 'category': 'CFK'} {'value': 49.666666666666664, 'category': 'NLU'} {'value': 24.295302013422816, 'category': 'BFS'} {'value': 54.13600451447075, 'category': 'BFS'} {'value': 54.88702987697715, 'category': 'BFS'} {'value': 74.1483219663718, 'category': 'BFS'} {'value': 100.0, 'category': 'BFS'} {'value': 50.34584608393744, 'category': 'NLU'} {'value': 49.636673785442774, 'category': 'NLU'} {'value': 0.11758183179468357, 'category': 'NLU'} {'value': 35.09699883531247, 'category': 'NLU'} {'value': 37.37259554778931, 'category': 'NLU'} {'value': 75.22840229480128, 'category': 'CFK'} {'value': 42.91722979615231, 'category': 'CFK'} {'value': 69.39999999999999, 'category': 'CFK'} {'value': 54.52101175064728, 'category': 'CFK'}
11
+ 3 swap-uniba_LLaMAntino-2-13b-hf-ITA_bfloat16 bfloat16 fine-tuned πŸ”Ά Adapter LlamaForCausalLM "<a target=""_blank"" href=""https://huggingface.co/swap-uniba/LLaMAntino-2-13b-hf-ITA"" style=""color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;"">swap-uniba/LLaMAntino-2-13b-hf-ITA</a>" 42.55180887560955 56.134928395066495 60.86087434667943 53.18253720578516 Llama 2 Community License 13.0 0 True πŸ™ˆ πŸ‘ {'value': 56.79723502304148, 'category': 'NLU'} {'value': 60.93495016444478, 'category': 'NLU'} {'value': 38.56655290102389, 'category': 'CFK'} {'value': 52.33333333333333, 'category': 'NLU'} {'value': 24.697986577181208, 'category': 'BFS'} {'value': 57.1976786986929, 'category': 'BFS'} {'value': 54.2447910290625, 'category': 'BFS'} {'value': 68.16391542846057, 'category': 'BFS'} {'value': 100.0, 'category': 'BFS'} {'value': 56.51605280366516, 'category': 'NLU'} {'value': 51.571111501558086, 'category': 'NLU'} {'value': 0.16387751408972254, 'category': 'NLU'} {'value': 23.495330157527007, 'category': 'NLU'} {'value': 38.60258050721683, 'category': 'NLU'} {'value': 74.20709928774112, 'category': 'CFK'} {'value': 42.12767769734222, 'category': 'CFK'} {'value': 71.8, 'category': 'CFK'} {'value': 53.97331208922525, 'category': 'CFK'}
12
+ 2 g8a9_tweety-mistral-7b_bfloat16 bfloat16 fine-tuned πŸ”Ά Delta MistralForCausalLM "<a target=""_blank"" href=""https://huggingface.co/g8a9/tweety-mistral-7b"" style=""color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;"">g8a9/tweety-mistral-7b</a>" 38.661388892098834 54.395439989754266 59.92245878392879 50.99309588859396 Apache 2.0 7.0 0 True πŸ‘ πŸ‘ {'value': 56.17170479302832, 'category': 'NLU'} {'value': 56.423255312264054, 'category': 'NLU'} {'value': 37.96928327645051, 'category': 'CFK'} {'value': 49.666666666666664, 'category': 'NLU'} {'value': 27.91946308724832, 'category': 'BFS'} {'value': 53.70259637851317, 'category': 'BFS'} {'value': 53.57434872305199, 'category': 'BFS'} {'value': 64.41588573083048, 'category': 'BFS'} {'value': 100.0, 'category': 'BFS'} {'value': 50.21506876304183, 'category': 'NLU'} {'value': 49.42973129711966, 'category': 'NLU'} {'value': 0.11006633622278786, 'category': 'NLU'} {'value': 18.81035591897043, 'category': 'NLU'} {'value': 28.46426204947685, 'category': 'NLU'} {'value': 64.39794432633592, 'category': 'CFK'} {'value': 37.75548120876122, 'category': 'CFK'} {'value': 73.4, 'category': 'CFK'} {'value': 58.45449113722366, 'category': 'CFK'}
13
+ 14 meta-llama_Llama-2-7b-hf_bfloat16 bfloat16 pretrained 🟒 Original LlamaForCausalLM "<a target=""_blank"" href=""https://huggingface.co/meta-llama/Llama-2-7b-hf"" style=""color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;"">meta-llama/Llama-2-7b-hf</a>" 37.63391313202453 51.72929042818041 58.70465604622213 49.35595320214236 Llama 2 Community License 7.0 0 True πŸ™ˆ πŸ™ˆ {'value': 50.26836062232489, 'category': 'NLU'} {'value': 57.089775606014214, 'category': 'NLU'} {'value': 35.153583617747444, 'category': 'CFK'} {'value': 36.11111111111111, 'category': 'NLU'} {'value': 25.100671140939596, 'category': 'BFS'} {'value': 49.33536331841416, 'category': 'BFS'} {'value': 51.73318260900284, 'category': 'BFS'} {'value': 67.35406316275402, 'category': 'BFS'} {'value': 100.0, 'category': 'BFS'} {'value': 47.63910390674802, 'category': 'NLU'} {'value': 48.347086153434084, 'category': 'NLU'} {'value': 0.036528464070504335, 'category': 'NLU'} {'value': 33.756452251726735, 'category': 'NLU'} {'value': 27.82288694076669, 'category': 'NLU'} {'value': 68.6449557225095, 'category': 'CFK'} {'value': 39.16657442183614, 'category': 'CFK'} {'value': 66.0, 'category': 'CFK'} {'value': 49.681338378809, 'category': 'CFK'}
14
+ 1 swap-uniba_LLaMAntino-2-7b-hf-ITA_bfloat16 bfloat16 fine-tuned πŸ”Ά Adapter LlamaForCausalLM "<a target=""_blank"" href=""https://huggingface.co/swap-uniba/LLaMAntino-2-7b-hf-ITA"" style=""color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;"">swap-uniba/LLaMAntino-2-7b-hf-ITA</a>" 34.9414685603479 52.13761513263328 55.65104269523598 47.57670879607239 Llama 2 Community License 7.0 0 True πŸ™ˆ πŸ‘ {'value': 51.11111111111111, 'category': 'NLU'} {'value': 53.267951636107355, 'category': 'NLU'} {'value': 33.70307167235495, 'category': 'CFK'} {'value': 34.66666666666667, 'category': 'NLU'} {'value': 24.295302013422816, 'category': 'BFS'} {'value': 45.514286626950536, 'category': 'BFS'} {'value': 47.59019966407009, 'category': 'BFS'} {'value': 60.855425171736485, 'category': 'BFS'} {'value': 100.0, 'category': 'BFS'} {'value': 47.55193616643805, 'category': 'NLU'} {'value': 46.04838972288254, 'category': 'NLU'} {'value': 0.043130721156949686, 'category': 'NLU'} {'value': 24.582547279426233, 'category': 'NLU'} {'value': 22.260015178994326, 'category': 'NLU'} {'value': 69.30864535653794, 'category': 'CFK'} {'value': 40.48297086291322, 'category': 'CFK'} {'value': 68.0, 'category': 'CFK'} {'value': 49.193387771360285, 'category': 'CFK'}
15
+ 7 sapienzanlp_Minerva-3B-base-v1.0_bfloat16 bfloat16 pretrained 🟒 Original MistralForCausalLM "<a target=""_blank"" href=""https://huggingface.co/sapienzanlp/Minerva-3B-base-v1.0"" style=""color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;"">sapienzanlp/Minerva-3B-base-v1.0</a>" 32.51780487861425 45.62270743274333 53.03397360999342 43.72482864045033 Apache 2.0 3.0 0 True πŸ™ˆ πŸ‘ {'value': 49.875480140137604, 'category': 'NLU'} {'value': 52.15633707230505, 'category': 'NLU'} {'value': 30.97269624573379, 'category': 'CFK'} {'value': 24.333333333333336, 'category': 'NLU'} {'value': 23.08724832214765, 'category': 'BFS'} {'value': 48.93622623624203, 'category': 'BFS'} {'value': 45.71528801169143, 'category': 'BFS'} {'value': 47.43110547988597, 'category': 'BFS'} {'value': 100.0, 'category': 'BFS'} {'value': 43.13118956315911, 'category': 'NLU'} {'value': 45.77114427860697, 'category': 'NLU'} {'value': -0.015363788820154219, 'category': 'NLU'} {'value': 21.8700732759084, 'category': 'NLU'} {'value': 23.020245154283693, 'category': 'NLU'} {'value': 42.99174436502196, 'category': 'CFK'} {'value': 37.371442699146954, 'category': 'CFK'} {'value': 68.60000000000001, 'category': 'CFK'} {'value': 48.17765385381398, 'category': 'CFK'}
16
+ 12 swap-uniba_LLaMAntino-2-chat-7b-hf-ITA_bfloat16 bfloat16 fine-tuned πŸ”Ά Adapter LlamaForCausalLM "<a target=""_blank"" href=""https://huggingface.co/swap-uniba/LLaMAntino-2-chat-7b-hf-ITA"" style=""color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;"">swap-uniba/LLaMAntino-2-chat-7b-hf-ITA</a>" 27.755861293433362 46.472723322751754 50.321561903359665 41.51671550651493 Llama 2 Community License 7.0 0 True πŸ™ˆ πŸ‘ {'value': 47.32809806550469, 'category': 'NLU'} {'value': 43.776841477788466, 'category': 'NLU'} {'value': 29.180887372013654, 'category': 'CFK'} {'value': 28.111111111111107, 'category': 'NLU'} {'value': 23.48993288590604, 'category': 'BFS'} {'value': 41.57668822526659, 'category': 'BFS'} {'value': 41.556830771361305, 'category': 'BFS'} {'value': 44.984357634264406, 'category': 'BFS'} {'value': 100.0, 'category': 'BFS'} {'value': 41.716872329343005, 'category': 'NLU'} {'value': 43.53102430893341, 'category': 'NLU'} {'value': -0.02574637563194932, 'category': 'NLU'} {'value': 8.269309204888462, 'category': 'NLU'} {'value': 9.339380225529704, 'category': 'NLU'} {'value': 58.43272201840739, 'category': 'CFK'} {'value': 39.880897484241935, 'category': 'CFK'} {'value': 61.8, 'category': 'CFK'} {'value': 43.06910973909579, 'category': 'CFK'}
17
+ 4 sapienzanlp_Minerva-1B-base-v1.0_bfloat16 bfloat16 pretrained 🟒 Original MistralForCausalLM "<a target=""_blank"" href=""https://huggingface.co/sapienzanlp/Minerva-1B-base-v1.0"" style=""color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;"">sapienzanlp/Minerva-1B-base-v1.0</a>" 31.262160888066564 35.95468750733228 53.9028524289684 40.37323360812241 Apache 2.0 1.0 0 True πŸ™ˆ πŸ‘ {'value': 50.76172656624852, 'category': 'NLU'} {'value': 53.84641914146224, 'category': 'NLU'} {'value': 24.573378839590443, 'category': 'CFK'} {'value': 22.666666666666664, 'category': 'NLU'} {'value': 26.57718120805369, 'category': 'BFS'} {'value': 48.25128927047713, 'category': 'BFS'} {'value': 44.581537708222804, 'category': 'BFS'} {'value': 50.10425395808837, 'category': 'BFS'} {'value': 100.0, 'category': 'BFS'} {'value': 46.49541549308013, 'category': 'NLU'} {'value': 45.46046920890855, 'category': 'NLU'} {'value': 0.022249590030925144, 'category': 'NLU'} {'value': 14.27287574762189, 'category': 'NLU'} {'value': 16.571464690513597, 'category': 'NLU'} {'value': 17.48160254077023, 'category': 'CFK'} {'value': 39.747932356260876, 'category': 'CFK'} {'value': 60.0, 'category': 'CFK'} {'value': 37.970523800039835, 'category': 'CFK'}
18
+ 16 sapienzanlp_Minerva-350M-base-v1.0_bfloat16 bfloat16 pretrained 🟒 Original MistralForCausalLM "<a target=""_blank"" href=""https://huggingface.co/sapienzanlp/Minerva-350M-base-v1.0"" style=""color: var(--link-text-color); text-decoration: underline;text-decoration-style: dotted;"">sapienzanlp/Minerva-350M-base-v1.0</a>" 27.112649526241633 32.348128725709046 56.325635671949826 38.59547130796684 Apache 2.0 0.35 0 True πŸ™ˆ πŸ‘ {'value': 45.17543859649123, 'category': 'NLU'} {'value': 35.72145622912868, 'category': 'NLU'} {'value': 24.40273037542662, 'category': 'CFK'} {'value': 22.88888888888889, 'category': 'NLU'} {'value': 52.75167785234899, 'category': 'BFS'} {'value': 41.92832319168979, 'category': 'BFS'} {'value': 40.67042217927179, 'category': 'BFS'} {'value': 46.277755136438564, 'category': 'BFS'} {'value': 100.0, 'category': 'BFS'} {'value': 36.23277134884009, 'category': 'NLU'} {'value': 43.223117993157416, 'category': 'NLU'} {'value': -0.036868413829916326, 'category': 'NLU'} {'value': 10.308018221966565, 'category': 'NLU'} {'value': 23.388373345290127, 'category': 'NLU'} {'value': 4.903980027793706, 'category': 'CFK'} {'value': 43.7486912416563, 'category': 'CFK'} {'value': 56.599999999999994, 'category': 'CFK'} {'value': 32.085241983668595, 'category': 'CFK'}