Fixing some errors of the leaderboard evaluation results in the ModelCard yaml #5
by leaderboard-pt-pr-bot - opened

README.md CHANGED
@@ -68,6 +68,19 @@ model-index:
     - type: f1_macro
       value: 90.31
       name: f1-macro
+    source:
+      url: https://huggingface.co/spaces/eduagarcia/open_pt_llm_leaderboard?query=cnmoro/Mistral-7B-Portuguese
+      name: Open Portuguese LLM Leaderboard
+  - task:
+      type: text-generation
+      name: Text Generation
+    dataset:
+      name: Assin2 STS
+      type: eduagarcia/portuguese_benchmark
+      split: test
+      args:
+        num_few_shot: 15
+    metrics:
     - type: pearson
       value: 76.55
       name: pearson
@@ -95,7 +108,7 @@ model-index:
       name: Text Generation
     dataset:
       name: HateBR Binary
-      type:
+      type: ruanchaves/hatebr
       split: test
       args:
         num_few_shot: 25
@@ -103,6 +116,19 @@ model-index:
     - type: f1_macro
       value: 79.21
       name: f1-macro
+    source:
+      url: https://huggingface.co/spaces/eduagarcia/open_pt_llm_leaderboard?query=cnmoro/Mistral-7B-Portuguese
+      name: Open Portuguese LLM Leaderboard
+  - task:
+      type: text-generation
+      name: Text Generation
+    dataset:
+      name: PT Hate Speech Binary
+      type: hate_speech_portuguese
+      split: test
+      args:
+        num_few_shot: 25
+    metrics:
     - type: f1_macro
       value: 68.87
       name: f1-macro
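
For reference, this is roughly how one complete `results` entry reads once the patch is applied. It is a sketch assembled from the hunks above (the Assin2 STS addition plus its pearson metric); the surrounding `model-index` scaffolding and the top-level model name are assumptions, the latter inferred from the leaderboard query URL.

model-index:
- name: Mistral-7B-Portuguese  # assumed from ?query=cnmoro/Mistral-7B-Portuguese in the source URL
  results:
  - task:
      type: text-generation
      name: Text Generation
    dataset:
      name: Assin2 STS
      type: eduagarcia/portuguese_benchmark
      split: test
      args:
        num_few_shot: 15
    metrics:
    - type: pearson
      value: 76.55
      name: pearson
    source:
      url: https://huggingface.co/spaces/eduagarcia/open_pt_llm_leaderboard?query=cnmoro/Mistral-7B-Portuguese
      name: Open Portuguese LLM Leaderboard

The patch thus fills the previously empty dataset `type` for HateBR Binary and adds the missing `source` blocks and leaderboard entries, so each result points back to the Open Portuguese LLM Leaderboard.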