{
"config": {
"model_type": "pretrained",
"model_dtype": "torch.float16",
"model_name": "LLaMA3.1-8B",
"model_sha": "main"
},
"results": {
"NER": {
"EntityF1": 0.14
},
"FINER-ORD": {
"EntityF1": 0.12
},
"FinRED": {
"F1": 0.00
},
"SC": {
"F1": 0.83
},
"CD": {
"F1": 0.00
},
"FNXL": {
"EntityF1": 0.00
},
"FSRL": {
"EntityF1": 0.00
},
"FPB": {
"F1": 0.76
},
"FiQA-SA": {
"F1": 0.75
},
"TSA": {
"RMSE": 0.17
},
"Headlines": {
"AvgF1": 0.60
},
"FOMC": {
"F1": 0.48
},
"FinArg-ACC": {
"MicroF1": 0.53
},
"FinArg-ARC": {
"MicroF1": 0.55
},
"MultiFin": {
"MicroF1": 0.62
},
"MA": {
"MicroF1": 0.85
},
"MLESG": {
"MicroF1": 0.31
},
"FinQA": {
"EmAcc": 0.00
},
"TATQA": {
"EmAcc": 0.04
},
"Regulations": {
"Rouge-1": 0.27
},
"ConvFinQA": {
"EmAcc": 0.00
},
"EDTSUM": {
"Rouge-1": 0.20
},
"ECTSUM": {
"Rouge-1": 0.00
},
"BigData22": {
"Acc": 0.54
},
"ACL18": {
"Acc": 0.52
},
"CIKM18": {
"Acc": 0.57
}
}
}