shahriarshm
committed on
Commit
•
2f26474
1
Parent(s):
d619517
update benchmark results
Browse files- leaderboard/CohereForAI_aya_23_8B/{results_2024-08-03T15:34:49+00:00.json → results_2024-08-11T17:06:51+00:00.json} +1 -1
- leaderboard/MehdiHosseiniMoghadam_AVA_Llama_3_V2/{results_2024-08-03T15:34:49+00:00.json → results_2024-08-11T17:06:51+00:00.json} +1 -1
- leaderboard/PartAI_Dorna_Llama3_8B_Instruct/{results_2024-08-03T15:34:49+00:00.json → results_2024-08-11T17:06:51+00:00.json} +1 -1
- leaderboard/Qwen_Qwen2_72B_Instruct/results_2024-08-11T17:06:51+00:00.json +1 -0
- leaderboard/Qwen_Qwen2_7B_Instruct/{results_2024-08-03T15:34:49+00:00.json → results_2024-08-11T17:06:51+00:00.json} +1 -1
- leaderboard/claude_3_5_sonnet_20240620/{results_2024-08-09T21:20:10+00:00.json → results_2024-08-11T17:06:51+00:00.json} +1 -1
- leaderboard/gemma2_9b_it/results_2024-08-03T16:03:23+00:00.json +0 -1
- leaderboard/gemma2_9b_it/results_2024-08-11T17:06:51+00:00.json +1 -0
- leaderboard/gpt_3.5_turbo/{results_2024-08-06T19:44:56+00:00.json → results_2024-08-11T17:06:51+00:00.json} +1 -1
- leaderboard/gpt_4_turbo/{results_2024-08-06T19:44:56+00:00.json → results_2024-08-11T17:06:51+00:00.json} +1 -1
- leaderboard/gpt_4o/{results_2024-08-06T19:44:56+00:00.json → results_2024-08-11T17:06:51+00:00.json} +1 -1
- leaderboard/gpt_4o_mini/{results_2024-08-06T19:44:56+00:00.json → results_2024-08-11T17:06:51+00:00.json} +1 -1
- leaderboard/llama3_70b_8192/results_2024-08-11T17:06:51+00:00.json +1 -0
- leaderboard/meta_llama_Meta_Llama_3.1_8B_Instruct/{results_2024-08-03T15:34:49+00:00.json → results_2024-08-11T17:06:51+00:00.json} +1 -1
- leaderboard/meta_llama_Meta_Llama_3_8B_Instruct/{results_2024-08-03T15:34:49+00:00.json → results_2024-08-11T17:06:51+00:00.json} +1 -1
- leaderboard/universitytehran_PersianMind_v1.0/{results_2024-08-03T15:34:49+00:00.json → results_2024-08-11T17:06:51+00:00.json} +1 -1
leaderboard/CohereForAI_aya_23_8B/{results_2024-08-03T15:34:49+00:00.json → results_2024-08-11T17:06:51+00:00.json}
RENAMED
@@ -1 +1 @@
|
|
1 |
-
{"config": {"model_dtype": "", "model_name": "
|
|
|
1 |
+
{"config": {"model_dtype": "", "model_name": "CohereForAI_aya_23_8B", "model_sha": ""}, "results": {"Persian MMLU": {"Exact Match": 0.285}, "ParsiNLU Entailment": {"Exact Match": 0.395}, "ParsiNLU Machine Translation En Fa": {"Persian Sentence Bleu": 0.318}, "ParsiNLU Reading Comprehension": {"Common Tokens": 0.675}, "ParsiNLU Machine Translation Fa En": {"English Sentence Bleu": 0.35}, "Persian News Summary": {"Persian Rouge": 0.252}, "FarsTail Entailment": {"Exact Match": 0.467}, "Persian Math": {"Math Equivalence": 0.36}}}
|
leaderboard/MehdiHosseiniMoghadam_AVA_Llama_3_V2/{results_2024-08-03T15:34:49+00:00.json → results_2024-08-11T17:06:51+00:00.json}
RENAMED
@@ -1 +1 @@
|
|
1 |
-
{"config": {"model_dtype": "", "model_name": "
|
|
|
1 |
+
{"config": {"model_dtype": "", "model_name": "MehdiHosseiniMoghadam_AVA_Llama_3_V2", "model_sha": ""}, "results": {"Persian MMLU": {"Exact Match": 0.225}, "ParsiNLU Entailment": {"Exact Match": 0.428}, "ParsiNLU Machine Translation En Fa": {"Persian Sentence Bleu": 0.26}, "ParsiNLU Reading Comprehension": {"Common Tokens": 0.569}, "ParsiNLU Machine Translation Fa En": {"English Sentence Bleu": 0.059}, "Persian News Summary": {"Persian Rouge": 0.275}, "FarsTail Entailment": {"Exact Match": 0.217}, "Persian Math": {"Math Equivalence": 0.434}}}
|
leaderboard/PartAI_Dorna_Llama3_8B_Instruct/{results_2024-08-03T15:34:49+00:00.json → results_2024-08-11T17:06:51+00:00.json}
RENAMED
@@ -1 +1 @@
|
|
1 |
-
{"config": {"model_dtype": "", "model_name": "
|
|
|
1 |
+
{"config": {"model_dtype": "", "model_name": "PartAI_Dorna_Llama3_8B_Instruct", "model_sha": ""}, "results": {"Persian MMLU": {"Exact Match": 0.235}, "ParsiNLU Entailment": {"Exact Match": 0.411}, "ParsiNLU Machine Translation En Fa": {"Persian Sentence Bleu": 0.265}, "ParsiNLU Reading Comprehension": {"Common Tokens": 0.61}, "ParsiNLU Machine Translation Fa En": {"English Sentence Bleu": 0.246}, "Persian News Summary": {"Persian Rouge": 0.23}, "FarsTail Entailment": {"Exact Match": 0.408}, "Persian Math": {"Math Equivalence": 0.423}}}
|
leaderboard/Qwen_Qwen2_72B_Instruct/results_2024-08-11T17:06:51+00:00.json
ADDED
@@ -0,0 +1 @@
|
|
|
|
|
1 |
+
{"config": {"model_dtype": "", "model_name": "Qwen_Qwen2_72B_Instruct", "model_sha": ""}, "results": {"Persian MMLU": {"Exact Match": 0.285}, "ParsiNLU Entailment": {"Exact Match": 0.793}, "ParsiNLU Machine Translation En Fa": {"Persian Sentence Bleu": 0.236}, "ParsiNLU Reading Comprehension": {"Common Tokens": 0.694}, "ParsiNLU Machine Translation Fa En": {"English Sentence Bleu": 0.308}, "Persian News Summary": {"Persian Rouge": 0.3}, "FarsTail Entailment": {"Exact Match": 0.833}, "Persian Math": {"Math Equivalence": 0.737}}}
|
leaderboard/Qwen_Qwen2_7B_Instruct/{results_2024-08-03T15:34:49+00:00.json → results_2024-08-11T17:06:51+00:00.json}
RENAMED
@@ -1 +1 @@
|
|
1 |
-
{"config": {"model_dtype": "", "model_name": "
|
|
|
1 |
+
{"config": {"model_dtype": "", "model_name": "Qwen_Qwen2_7B_Instruct", "model_sha": ""}, "results": {"Persian MMLU": {"Exact Match": 0.395}, "ParsiNLU Entailment": {"Exact Match": 0.544}, "ParsiNLU Machine Translation En Fa": {"Persian Sentence Bleu": 0.229}, "ParsiNLU Reading Comprehension": {"Common Tokens": 0.639}, "ParsiNLU Machine Translation Fa En": {"English Sentence Bleu": 0.316}, "Persian News Summary": {"Persian Rouge": 0.27}, "FarsTail Entailment": {"Exact Match": 0.446}, "Persian Math": {"Math Equivalence": 0.6}}}
|
leaderboard/claude_3_5_sonnet_20240620/{results_2024-08-09T21:20:10+00:00.json → results_2024-08-11T17:06:51+00:00.json}
RENAMED
@@ -1 +1 @@
|
|
1 |
-
{"config": {"model_dtype": "", "model_name": "claude_3_5_sonnet_20240620", "model_sha": ""}, "results": {"Persian MMLU": {"Exact Match": 0.505}, "ParsiNLU Entailment": {"Exact Match": 0.851}, "ParsiNLU Machine Translation En Fa": {"Persian Sentence Bleu": 0.216}, "ParsiNLU Reading Comprehension": {"Common Tokens": 0.496}, "ParsiNLU Machine Translation Fa En": {"English Sentence Bleu": 0.181}, "Persian News Summary": {"Persian Rouge": 0.333}, "FarsTail Entailment": {"Exact Match": 0.944}, "Persian Math": {"Math Equivalence": 0.
|
|
|
1 |
+
{"config": {"model_dtype": "", "model_name": "claude_3_5_sonnet_20240620", "model_sha": ""}, "results": {"Persian MMLU": {"Exact Match": 0.505}, "ParsiNLU Entailment": {"Exact Match": 0.851}, "ParsiNLU Machine Translation En Fa": {"Persian Sentence Bleu": 0.216}, "ParsiNLU Reading Comprehension": {"Common Tokens": 0.496}, "ParsiNLU Machine Translation Fa En": {"English Sentence Bleu": 0.181}, "Persian News Summary": {"Persian Rouge": 0.333}, "FarsTail Entailment": {"Exact Match": 0.944}, "Persian Math": {"Math Equivalence": 0.851}}}
|
leaderboard/gemma2_9b_it/results_2024-08-03T16:03:23+00:00.json
DELETED
@@ -1 +0,0 @@
|
|
1 |
-
{"config": {"model_dtype": "", "model_name": "google/gemma2-9b-it", "model_sha": ""}, "results": {"Persian MMLU": {"Exact Match": 0.305}, "ParsiNLU Entailment": {"Exact Match": 0.609}, "ParsiNLU Machine Translation En Fa": {"Persian Sentence Bleu": 0.304}, "ParsiNLU Reading Comprehension": {"Common Tokens": 0.779}, "ParsiNLU Machine Translation Fa En": {"English Sentence Bleu": 0.3}, "Persian News Summary": {"Persian Rouge": 0.36}, "FarsTail Entailment": {"Exact Match": 0.0}, "Persian Math": {"Math Equivalence": 0.48}}}
|
|
|
|
leaderboard/gemma2_9b_it/results_2024-08-11T17:06:51+00:00.json
ADDED
@@ -0,0 +1 @@
|
|
|
|
|
1 |
+
{"config": {"model_dtype": "", "model_name": "gemma2_9b_it", "model_sha": ""}, "results": {"Persian MMLU": {"Exact Match": 0.305}, "ParsiNLU Entailment": {"Exact Match": 0.609}, "ParsiNLU Machine Translation En Fa": {"Persian Sentence Bleu": 0.304}, "ParsiNLU Reading Comprehension": {"Common Tokens": 0.779}, "ParsiNLU Machine Translation Fa En": {"English Sentence Bleu": 0.3}, "Persian News Summary": {"Persian Rouge": 0.36}, "FarsTail Entailment": {"Exact Match": 0.787}, "Persian Math": {"Math Equivalence": 0.702}}}
|
leaderboard/gpt_3.5_turbo/{results_2024-08-06T19:44:56+00:00.json → results_2024-08-11T17:06:51+00:00.json}
RENAMED
@@ -1 +1 @@
|
|
1 |
-
{"config": {"model_dtype": "", "model_name": "
|
|
|
1 |
+
{"config": {"model_dtype": "", "model_name": "gpt_3.5_turbo", "model_sha": ""}, "results": {"Persian MMLU": {"Exact Match": 0.035}, "ParsiNLU Entailment": {"Exact Match": 0.432}, "ParsiNLU Machine Translation En Fa": {"Persian Sentence Bleu": 0.343}, "ParsiNLU Reading Comprehension": {"Common Tokens": 0.681}, "ParsiNLU Machine Translation Fa En": {"English Sentence Bleu": 0.36}, "Persian News Summary": {"Persian Rouge": 0.314}, "FarsTail Entailment": {"Exact Match": 0.366}, "Persian Math": {"Math Equivalence": 0.589}}}
|
leaderboard/gpt_4_turbo/{results_2024-08-06T19:44:56+00:00.json → results_2024-08-11T17:06:51+00:00.json}
RENAMED
@@ -1 +1 @@
|
|
1 |
-
{"config": {"model_dtype": "", "model_name": "
|
|
|
1 |
+
{"config": {"model_dtype": "", "model_name": "gpt_4_turbo", "model_sha": ""}, "results": {"Persian MMLU": {"Exact Match": 0.135}, "ParsiNLU Entailment": {"Exact Match": 0.75}, "ParsiNLU Machine Translation En Fa": {"Persian Sentence Bleu": 0.377}, "ParsiNLU Reading Comprehension": {"Common Tokens": 0.777}, "ParsiNLU Machine Translation Fa En": {"English Sentence Bleu": 0.399}, "Persian News Summary": {"Persian Rouge": 0.348}, "FarsTail Entailment": {"Exact Match": 0.828}, "Persian Math": {"Math Equivalence": 0.811}}}
|
leaderboard/gpt_4o/{results_2024-08-06T19:44:56+00:00.json → results_2024-08-11T17:06:51+00:00.json}
RENAMED
@@ -1 +1 @@
|
|
1 |
-
{"config": {"model_dtype": "", "model_name": "
|
|
|
1 |
+
{"config": {"model_dtype": "", "model_name": "gpt_4o", "model_sha": ""}, "results": {"Persian MMLU": {"Exact Match": 0.31}, "ParsiNLU Entailment": {"Exact Match": 0.771}, "ParsiNLU Machine Translation En Fa": {"Persian Sentence Bleu": 0.349}, "ParsiNLU Reading Comprehension": {"Common Tokens": 0.752}, "ParsiNLU Machine Translation Fa En": {"English Sentence Bleu": 0.431}, "Persian News Summary": {"Persian Rouge": 0.35}, "FarsTail Entailment": {"Exact Match": 0.893}, "Persian Math": {"Math Equivalence": 0.823}}}
|
leaderboard/gpt_4o_mini/{results_2024-08-06T19:44:56+00:00.json → results_2024-08-11T17:06:51+00:00.json}
RENAMED
@@ -1 +1 @@
|
|
1 |
-
{"config": {"model_dtype": "", "model_name": "
|
|
|
1 |
+
{"config": {"model_dtype": "", "model_name": "gpt_4o_mini", "model_sha": ""}, "results": {"Persian MMLU": {"Exact Match": 0.24}, "ParsiNLU Entailment": {"Exact Match": 0.733}, "ParsiNLU Machine Translation En Fa": {"Persian Sentence Bleu": 0.341}, "ParsiNLU Reading Comprehension": {"Common Tokens": 0.734}, "ParsiNLU Machine Translation Fa En": {"English Sentence Bleu": 0.389}, "Persian News Summary": {"Persian Rouge": 0.335}, "FarsTail Entailment": {"Exact Match": 0.85}, "Persian Math": {"Math Equivalence": 0.781}}}
|
leaderboard/llama3_70b_8192/results_2024-08-11T17:06:51+00:00.json
ADDED
@@ -0,0 +1 @@
|
|
|
|
|
1 |
+
{"config": {"model_dtype": "", "model_name": "llama3_70b_8192", "model_sha": ""}, "results": {"Persian MMLU": {"Exact Match": 0.37}, "ParsiNLU Entailment": {"Exact Match": 0.542}, "ParsiNLU Machine Translation En Fa": {"Persian Sentence Bleu": 0.326}, "ParsiNLU Reading Comprehension": {"Common Tokens": 0.794}, "ParsiNLU Machine Translation Fa En": {"English Sentence Bleu": 0.254}, "Persian News Summary": {"Persian Rouge": 0.358}, "FarsTail Entailment": {"Exact Match": 0.738}, "Persian Math": {"Math Equivalence": 0.68}}}
|
leaderboard/meta_llama_Meta_Llama_3.1_8B_Instruct/{results_2024-08-03T15:34:49+00:00.json → results_2024-08-11T17:06:51+00:00.json}
RENAMED
@@ -1 +1 @@
|
|
1 |
-
{"config": {"model_dtype": "", "model_name": "
|
|
|
1 |
+
{"config": {"model_dtype": "", "model_name": "meta_llama_Meta_Llama_3.1_8B_Instruct", "model_sha": ""}, "results": {"Persian MMLU": {"Exact Match": 0.035}, "ParsiNLU Entailment": {"Exact Match": 0.462}, "ParsiNLU Machine Translation En Fa": {"Persian Sentence Bleu": 0.301}, "ParsiNLU Reading Comprehension": {"Common Tokens": 0.734}, "ParsiNLU Machine Translation Fa En": {"English Sentence Bleu": 0.29}, "Persian News Summary": {"Persian Rouge": 0.303}, "FarsTail Entailment": {"Exact Match": 0.409}, "Persian Math": {"Math Equivalence": 0.537}}}
|
leaderboard/meta_llama_Meta_Llama_3_8B_Instruct/{results_2024-08-03T15:34:49+00:00.json → results_2024-08-11T17:06:51+00:00.json}
RENAMED
@@ -1 +1 @@
|
|
1 |
-
{"config": {"model_dtype": "", "model_name": "
|
|
|
1 |
+
{"config": {"model_dtype": "", "model_name": "meta_llama_Meta_Llama_3_8B_Instruct", "model_sha": ""}, "results": {"Persian MMLU": {"Exact Match": 0.215}, "ParsiNLU Entailment": {"Exact Match": 0.481}, "ParsiNLU Machine Translation En Fa": {"Persian Sentence Bleu": 0.282}, "ParsiNLU Reading Comprehension": {"Common Tokens": 0.704}, "ParsiNLU Machine Translation Fa En": {"English Sentence Bleu": 0.251}, "Persian News Summary": {"Persian Rouge": 0.09}, "FarsTail Entailment": {"Exact Match": 0.342}, "Persian Math": {"Math Equivalence": 0.497}}}
|
leaderboard/universitytehran_PersianMind_v1.0/{results_2024-08-03T15:34:49+00:00.json → results_2024-08-11T17:06:51+00:00.json}
RENAMED
@@ -1 +1 @@
|
|
1 |
-
{"config": {"model_dtype": "", "model_name": "
|
|
|
1 |
+
{"config": {"model_dtype": "", "model_name": "universitytehran_PersianMind_v1.0", "model_sha": ""}, "results": {"Persian MMLU": {"Exact Match": 0.0}, "ParsiNLU Entailment": {"Exact Match": 0.011}, "ParsiNLU Machine Translation En Fa": {"Persian Sentence Bleu": 0.296}, "ParsiNLU Reading Comprehension": {"Common Tokens": 0.5}, "ParsiNLU Machine Translation Fa En": {"English Sentence Bleu": 0.26}, "Persian News Summary": {"Persian Rouge": 0.359}, "FarsTail Entailment": {"Exact Match": 0.313}, "Persian Math": {"Math Equivalence": 0.376}}}
|