results/demo-leaderboard/results_chatglm-3-6b.json
{
    "config": {
        "model_type": "pretrained",
        "model_dtype": "torch.float16",
        "model_name": "THUDM/chatglm3-6b",
        "model_sha": "main"
    },
    "results": {
        "FPB": {
            "F1": 0.74,
            "Acc": 0.74
        },
        "FiQA-SA": {
            "F1": 0.56
        },
        "TSA": {
            "RMSE": 0.35
        },
        "Headlines": {
            "AvgF1": 0.66
        },
        "FOMC": {
            "F1": 0.47,
            "Acc": 0.46
        },
        "FinArg-ACC": {
            "MicroF1": 0.25
        },
        "FinArg-ARC": {
            "MicroF1": 0.50
        },
        "MultiFin": {
            "MicroF1": 0.47
        },
        "MA": {
            "MicroF1": 0.79
        },
        "MLESG": {
            "MicroF1": 0.16
        },
        "NER": {
            "EntityF1": 0.25
        },
        "FINER-ORD": {
            "EntityF1": 0.02
        },
        "FinRED": {
            "F1": 0.00
        },
        "SC": {
            "F1": 0.81
        },
        "CD": {
            "F1": 0.00
        },
        "FinQA": {
            "EmAcc": 0.00
        },
        "TATQA": {
            "EmAcc": 0.07
        },
        "ConvFinQA": {
            "EmAcc": 0.00
        },
        "FNXL": {
            "EntityF1": 0.00
        },
        "FSRL": {
            "EntityF1": 0.00
        },
        "EDTSUM": {
            "Rouge-1": 0.13,
            "BertScore": 0.47,
            "BartScore": -4.78
        },
        "ECTSUM": {
            "Rouge-1": 0.00,
            "BertScore": 0.00,
            "BartScore": -5.18
        },
        "BigData22": {
            "Acc": 0.47,
            "MCC": 0.00
        },
        "ACL18": {
            "Acc": 0.50,
            "MCC": 0.02
        },
        "CIKM18": {
            "Acc": 0.42,
            "MCC": 0.02
        },
        "German": {
            "F1": 0.41,
            "MCC": -0.30
        },
        "Australian": {
            "F1": 0.27,
            "MCC": -0.02
        },
        "LendingClub": {
            "F1": 0.72,
            "MCC": -0.03
        },
        "ccf": {
            "F1": 1.00,
            "MCC": 0.00
        },
        "ccfraud": {
            "F1": 0.67,
            "MCC": -0.15
        },
        "polish": {
            "F1": 0.05,
            "MCC": 0.00
        },
        "taiwan": {
            "F1": 0.05,
            "MCC": 0.01
        },
        "portoseguro": {
            "F1": 0.95,
            "MCC": 0.06
        },
        "travelinsurance": {
            "F1": 0.97,
            "MCC": 0.03
        },
        "MultiFin-ES": {
            "Acc": 0.13,
            "F1": 0.14,
            "Macro F1": 0.14,
            "MCC": 0.08
        },
        "EFP": {
            "Acc": 0.32,
            "F1": 0.24,
            "Macro F1": 0.24,
            "MCC": 0.11
        },
        "EFPA": {
            "Acc": 0.25,
            "F1": 0.17,
            "Macro F1": 0.17,
            "MCC": -0.01
        },
        "FinanceES": {
            "Acc": 0.02,
            "F1": 0.04,
            "Macro F1": 0.03,
            "MCC": -0.00
        },
        "TSA-Spanish": {
            "Acc": 0.05,
            "F1": 0.09,
            "Macro F1": 0.07,
            "MCC": 0.04
        }
    }
}
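
For reference, a minimal sketch (not part of the JSON file above) of how a leaderboard script might load this file and flatten the per-task metrics into rows. It assumes only the layout shown above; the local path is hypothetical.

import json

# Hypothetical local path; adjust to wherever the results file is stored.
with open("demo-leaderboard/results_chatglm-3-6b.json") as f:
    data = json.load(f)

model = data["config"]["model_name"]  # "THUDM/chatglm3-6b"

# Each task maps to a dict of metric name -> score; emit one row per metric.
for task, metrics in data["results"].items():
    for metric, value in metrics.items():
        print(f"{model}\t{task}\t{metric}\t{value}")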