{
"batch_size": 1,
"best_eval_metric_checkpoint_number": 1,
"best_eval_metric_epoch": 0,
"best_eval_metric_steps": 10804,
"best_eval_metric_value": 1.2898268699645996,
"best_eval_test_metrics": {
"combined": {
"loss": 1.2531609535217285
},
"title": {
"bleu": 0.0180843286216259,
"char_error_rate": 9.017064094543457,
"loss": 1.2531609535217285,
"next_token_perplexity": 17219.509765625,
"perplexity": 31900.73046875,
"rouge1_fmeasure": 0.11042051762342453,
"rouge1_precision": 0.060600001364946365,
"rouge1_recall": 0.6982588171958923,
"rouge2_fmeasure": 0.05184469372034073,
"rouge2_precision": 0.028261005878448486,
"rouge2_recall": 0.3632716238498688,
"rougeL_fmeasure": 0.09752275049686432,
"rougeL_precision": 0.053463079035282135,
"rougeL_recall": 0.6240717768669128,
"rougeLsum_fmeasure": 0.10595671832561493,
"rougeLsum_precision": 0.05813445523381233,
"rougeLsum_recall": 0.6721860766410828,
"sequence_accuracy": 0.0,
"token_accuracy": 3.437707346165553e-05,
"word_error_rate": 11.557415008544922
}
},
"best_eval_train_metrics": {
"combined": {
"loss": 1.1997183561325073
},
"title": {
"bleu": 0.018341578543186188,
"char_error_rate": 9.083020210266113,
"loss": 1.1997183561325073,
"next_token_perplexity": 17061.228515625,
"perplexity": 31874.365234375,
"rouge1_fmeasure": 0.11363156139850616,
"rouge1_precision": 0.062363866716623306,
"rouge1_recall": 0.7099939584732056,
"rouge2_fmeasure": 0.05589400604367256,
"rouge2_precision": 0.030487291514873505,
"rouge2_recall": 0.380205899477005,
"rougeL_fmeasure": 0.10056732594966888,
"rougeL_precision": 0.055148735642433167,
"rougeL_recall": 0.6347331404685974,
"rougeLsum_fmeasure": 0.10977762192487717,
"rougeLsum_precision": 0.060230888426303864,
"rougeLsum_recall": 0.6885234117507935,
"sequence_accuracy": 0.0,
"token_accuracy": 3.639719579950906e-05,
"word_error_rate": 11.418218612670898
}
},
"best_eval_validation_metrics": {
"combined": {
"loss": 1.2898268699645996
},
"title": {
"bleu": 0.015481029637157917,
"char_error_rate": 9.0972900390625,
"loss": 1.2898268699645996,
"next_token_perplexity": 17339.482421875,
"perplexity": 31906.75390625,
"rouge1_fmeasure": 0.10748682916164398,
"rouge1_precision": 0.058967169374227524,
"rouge1_recall": 0.6764949560165405,
"rouge2_fmeasure": 0.050337474793195724,
"rouge2_precision": 0.02745284140110016,
"rouge2_recall": 0.3486025929450989,
"rougeL_fmeasure": 0.09531988948583603,
"rougeL_precision": 0.05225425586104393,
"rougeL_recall": 0.6058534979820251,
"rougeLsum_fmeasure": 0.103290855884552,
"rougeLsum_precision": 0.056647758930921555,
"rougeLsum_recall": 0.6526778936386108,
"sequence_accuracy": 0.0,
"token_accuracy": 0.0,
"word_error_rate": 11.665448188781738
}
},
"best_increase_batch_size_eval_metric": 1.7976931348623157e+308,
"checkpoint_number": 1,
"epoch": 1,
"last_improvement_steps": 0,
"last_increase_batch_size": 0,
"last_increase_batch_size_eval_metric_improvement": 0,
"last_increase_batch_size_steps": 0,
"last_learning_rate_reduction": 0,
"last_learning_rate_reduction_steps": 0,
"learning_rate": 0.0004,
"num_increases_batch_size": 0,
"num_reductions_learning_rate": 0,
"steps": 10804,
"test_metrics": {
"combined": {
"loss": [
[
0,
10804,
1.2531609535217285
]
]
},
"title": {
"bleu": [
[
0,
10804,
0.0180843286216259
]
],
"char_error_rate": [
[
0,
10804,
9.017064094543457
]
],
"loss": [
[
0,
10804,
1.2531609535217285
]
],
"next_token_perplexity": [
[
0,
10804,
17219.509765625
]
],
"perplexity": [
[
0,
10804,
31900.73046875
]
],
"rouge1_fmeasure": [
[
0,
10804,
0.11042051762342453
]
],
"rouge1_precision": [
[
0,
10804,
0.060600001364946365
]
],
"rouge1_recall": [
[
0,
10804,
0.6982588171958923
]
],
"rouge2_fmeasure": [
[
0,
10804,
0.05184469372034073
]
],
"rouge2_precision": [
[
0,
10804,
0.028261005878448486
]
],
"rouge2_recall": [
[
0,
10804,
0.3632716238498688
]
],
"rougeL_fmeasure": [
[
0,
10804,
0.09752275049686432
]
],
"rougeL_precision": [
[
0,
10804,
0.053463079035282135
]
],
"rougeL_recall": [
[
0,
10804,
0.6240717768669128
]
],
"rougeLsum_fmeasure": [
[
0,
10804,
0.10595671832561493
]
],
"rougeLsum_precision": [
[
0,
10804,
0.05813445523381233
]
],
"rougeLsum_recall": [
[
0,
10804,
0.6721860766410828
]
],
"sequence_accuracy": [
[
0,
10804,
0.0
]
],
"token_accuracy": [
[
0,
10804,
3.437707346165553e-05
]
],
"word_error_rate": [
[
0,
10804,
11.557415008544922
]
]
}
},
"train_metrics": {
"combined": {
"loss": [
[
0,
10804,
1.1997183561325073
]
]
},
"title": {
"bleu": [
[
0,
10804,
0.018341578543186188
]
],
"char_error_rate": [
[
0,
10804,
9.083020210266113
]
],
"loss": [
[
0,
10804,
1.1997183561325073
]
],
"next_token_perplexity": [
[
0,
10804,
17061.228515625
]
],
"perplexity": [
[
0,
10804,
31874.365234375
]
],
"rouge1_fmeasure": [
[
0,
10804,
0.11363156139850616
]
],
"rouge1_precision": [
[
0,
10804,
0.062363866716623306
]
],
"rouge1_recall": [
[
0,
10804,
0.7099939584732056
]
],
"rouge2_fmeasure": [
[
0,
10804,
0.05589400604367256
]
],
"rouge2_precision": [
[
0,
10804,
0.030487291514873505
]
],
"rouge2_recall": [
[
0,
10804,
0.380205899477005
]
],
"rougeL_fmeasure": [
[
0,
10804,
0.10056732594966888
]
],
"rougeL_precision": [
[
0,
10804,
0.055148735642433167
]
],
"rougeL_recall": [
[
0,
10804,
0.6347331404685974
]
],
"rougeLsum_fmeasure": [
[
0,
10804,
0.10977762192487717
]
],
"rougeLsum_precision": [
[
0,
10804,
0.060230888426303864
]
],
"rougeLsum_recall": [
[
0,
10804,
0.6885234117507935
]
],
"sequence_accuracy": [
[
0,
10804,
0.0
]
],
"token_accuracy": [
[
0,
10804,
3.639719579950906e-05
]
],
"word_error_rate": [
[
0,
10804,
11.418218612670898
]
]
}
},
"tune_checkpoint_num": 0,
"validation_metrics": {
"combined": {
"loss": [
[
0,
10804,
1.2898268699645996
]
]
},
"title": {
"bleu": [
[
0,
10804,
0.015481029637157917
]
],
"char_error_rate": [
[
0,
10804,
9.0972900390625
]
],
"loss": [
[
0,
10804,
1.2898268699645996
]
],
"next_token_perplexity": [
[
0,
10804,
17339.482421875
]
],
"perplexity": [
[
0,
10804,
31906.75390625
]
],
"rouge1_fmeasure": [
[
0,
10804,
0.10748682916164398
]
],
"rouge1_precision": [
[
0,
10804,
0.058967169374227524
]
],
"rouge1_recall": [
[
0,
10804,
0.6764949560165405
]
],
"rouge2_fmeasure": [
[
0,
10804,
0.050337474793195724
]
],
"rouge2_precision": [
[
0,
10804,
0.02745284140110016
]
],
"rouge2_recall": [
[
0,
10804,
0.3486025929450989
]
],
"rougeL_fmeasure": [
[
0,
10804,
0.09531988948583603
]
],
"rougeL_precision": [
[
0,
10804,
0.05225425586104393
]
],
"rougeL_recall": [
[
0,
10804,
0.6058534979820251
]
],
"rougeLsum_fmeasure": [
[
0,
10804,
0.103290855884552
]
],
"rougeLsum_precision": [
[
0,
10804,
0.056647758930921555
]
],
"rougeLsum_recall": [
[
0,
10804,
0.6526778936386108
]
],
"sequence_accuracy": [
[
0,
10804,
0.0
]
],
"token_accuracy": [
[
0,
10804,
0.0
]
],
"word_error_rate": [
[
0,
10804,
11.665448188781738
]
]
}
}
}