from dataclasses import dataclass
from enum import Enum


@dataclass
class Task:
    benchmark: str
    metric: str
    col_name: str


# Select your tasks here
# ---------------------------------------------------
class Tasks(Enum):
    # task_key in the json file, metric_key in the json file, name to display in the leaderboard
    task0 = Task("arc_challenge_ita", "acc_norm,none", "ARC-C")
    task1 = Task("ami_2020_aggressiveness", "f1,none", "AMI 2020 Agg")
    task2 = Task("ami_2020_misogyny", "f1,none", "AMI 2020 Miso")
    task3 = Task("gente_rephrasing", "acc,none", "GeNTE Rephr")
    task4 = Task("belebele_ita", "acc_norm,none", "Belebele")
    task5 = Task("hatecheck_ita", "f1,none", "HateCheck")
    task6 = Task("honest_ita", "acc,none", "HONEST")
    task7 = Task("itacola", "mcc,none", "ItaCoLA")
    task8 = Task("news_sum", "bertscore,none", "News Sum")
    task9 = Task("squad_it", "squad_f1,get-answer", "SQuAD it")
    task10 = Task("truthfulqa_gen_ita", "rouge1_max,none", "TruthfulQA")


NUM_FEWSHOT = 0  # Change with your few shot
# ---------------------------------------------------


# Your leaderboard name
TITLE = """

ItaEval leaderboard

""" # What does your leaderboard evaluate? INTRODUCTION_TEXT = """ This leaderboard evaluates language models on ItaEval, a new unified benchmark for Italian. """ ITA_EVAL_REPO = "https://github.com/g8a9/ita-eval" # Which evaluations are you running? how can people reproduce what you have? LLM_BENCHMARKS_TEXT = f""" ## How it works ## Reproducibility To reproduce our results, head to {ITA_EVAL_REPO} for all the instructions. If all the setup goes smoothly, you can run 'MODEL' on ItaEval with: ```bash MODEL="..." lm_eval -mixed_precision=bf16 --model hf \ --model_args pretrained=$MODEL,dtype=bfloat16 \ --tasks ita_eval \ --device cuda:0 \ --batch_size "auto" \ --log_samples \ --output_path $FAST/ita_eval_v1/$MODEL \ --use_cache $FAST/ita_eval_v1/$MODEL \ --cache_requests "true" ``` """ EVALUATION_QUEUE_TEXT = """ We do not plan to accept autonomous submissions, yet. """ CITATION_BUTTON_LABEL = "Copy the following snippet to cite these results" CITATION_BUTTON_TEXT = r""" We are working on it! :) """