{
"best_metric": null,
"best_model_checkpoint": null,
"epoch": 45.96219931271477,
"global_step": 53500,
"is_hyper_param_search": false,
"is_local_process_zero": true,
"is_world_process_zero": true,
"log_history": [
{
"epoch": 0.43,
"learning_rate": 4.97852233676976e-05,
"loss": 2.4481,
"step": 500
},
{
"epoch": 0.86,
"learning_rate": 4.957044673539519e-05,
"loss": 2.0258,
"step": 1000
},
{
"epoch": 1.29,
"learning_rate": 4.935567010309279e-05,
"loss": 1.8794,
"step": 1500
},
{
"epoch": 1.72,
"learning_rate": 4.9140893470790375e-05,
"loss": 1.8082,
"step": 2000
},
{
"epoch": 2.15,
"learning_rate": 4.892611683848797e-05,
"loss": 1.7553,
"step": 2500
},
{
"epoch": 2.58,
"learning_rate": 4.871134020618557e-05,
"loss": 1.6993,
"step": 3000
},
{
"epoch": 3.01,
"learning_rate": 4.849656357388316e-05,
"loss": 1.6629,
"step": 3500
},
{
"epoch": 3.44,
"learning_rate": 4.828178694158076e-05,
"loss": 1.6249,
"step": 4000
},
{
"epoch": 3.87,
"learning_rate": 4.806701030927835e-05,
"loss": 1.6062,
"step": 4500
},
{
"epoch": 4.3,
"learning_rate": 4.785223367697595e-05,
"loss": 1.5661,
"step": 5000
},
{
"epoch": 4.73,
"learning_rate": 4.763745704467354e-05,
"loss": 1.5406,
"step": 5500
},
{
"epoch": 5.15,
"learning_rate": 4.7422680412371134e-05,
"loss": 1.5285,
"step": 6000
},
{
"epoch": 5.58,
"learning_rate": 4.720790378006873e-05,
"loss": 1.5024,
"step": 6500
},
{
"epoch": 6.01,
"learning_rate": 4.6993127147766324e-05,
"loss": 1.4933,
"step": 7000
},
{
"epoch": 6.44,
"learning_rate": 4.677835051546392e-05,
"loss": 1.474,
"step": 7500
},
{
"epoch": 6.87,
"learning_rate": 4.656357388316151e-05,
"loss": 1.46,
"step": 8000
},
{
"epoch": 7.3,
"learning_rate": 4.634879725085911e-05,
"loss": 1.4409,
"step": 8500
},
{
"epoch": 7.73,
"learning_rate": 4.61340206185567e-05,
"loss": 1.4251,
"step": 9000
},
{
"epoch": 8.16,
"learning_rate": 4.5919243986254295e-05,
"loss": 1.4157,
"step": 9500
},
{
"epoch": 8.59,
"learning_rate": 4.570446735395189e-05,
"loss": 1.3975,
"step": 10000
},
{
"epoch": 9.02,
"learning_rate": 4.5489690721649484e-05,
"loss": 1.3924,
"step": 10500
},
{
"epoch": 9.45,
"learning_rate": 4.527491408934708e-05,
"loss": 1.3699,
"step": 11000
},
{
"epoch": 9.88,
"learning_rate": 4.5060137457044674e-05,
"loss": 1.3711,
"step": 11500
},
{
"epoch": 10.31,
"learning_rate": 4.484536082474227e-05,
"loss": 1.3514,
"step": 12000
},
{
"epoch": 10.74,
"learning_rate": 4.463058419243987e-05,
"loss": 1.3502,
"step": 12500
},
{
"epoch": 11.17,
"learning_rate": 4.4415807560137455e-05,
"loss": 1.3308,
"step": 13000
},
{
"epoch": 11.6,
"learning_rate": 4.4201030927835053e-05,
"loss": 1.3288,
"step": 13500
},
{
"epoch": 12.03,
"learning_rate": 4.3986254295532645e-05,
"loss": 1.3197,
"step": 14000
},
{
"epoch": 12.46,
"learning_rate": 4.377147766323024e-05,
"loss": 1.3022,
"step": 14500
},
{
"epoch": 12.89,
"learning_rate": 4.3556701030927835e-05,
"loss": 1.3039,
"step": 15000
},
{
"epoch": 13.32,
"learning_rate": 4.334192439862543e-05,
"loss": 1.2948,
"step": 15500
},
{
"epoch": 13.75,
"learning_rate": 4.312714776632303e-05,
"loss": 1.2842,
"step": 16000
},
{
"epoch": 14.18,
"learning_rate": 4.2912371134020616e-05,
"loss": 1.2804,
"step": 16500
},
{
"epoch": 14.6,
"learning_rate": 4.2697594501718214e-05,
"loss": 1.2646,
"step": 17000
},
{
"epoch": 15.03,
"learning_rate": 4.2482817869415805e-05,
"loss": 1.2672,
"step": 17500
},
{
"epoch": 15.46,
"learning_rate": 4.2268041237113404e-05,
"loss": 1.2501,
"step": 18000
},
{
"epoch": 15.89,
"learning_rate": 4.2053264604811e-05,
"loss": 1.2566,
"step": 18500
},
{
"epoch": 16.32,
"learning_rate": 4.1838487972508593e-05,
"loss": 1.2406,
"step": 19000
},
{
"epoch": 16.75,
"learning_rate": 4.162371134020619e-05,
"loss": 1.2383,
"step": 19500
},
{
"epoch": 17.18,
"learning_rate": 4.140893470790378e-05,
"loss": 1.2246,
"step": 20000
},
{
"epoch": 17.61,
"learning_rate": 4.1194158075601375e-05,
"loss": 1.2219,
"step": 20500
},
{
"epoch": 18.04,
"learning_rate": 4.097938144329897e-05,
"loss": 1.2174,
"step": 21000
},
{
"epoch": 18.47,
"learning_rate": 4.0764604810996564e-05,
"loss": 1.2089,
"step": 21500
},
{
"epoch": 18.9,
"learning_rate": 4.054982817869416e-05,
"loss": 1.2049,
"step": 22000
},
{
"epoch": 19.33,
"learning_rate": 4.0335051546391754e-05,
"loss": 1.2014,
"step": 22500
},
{
"epoch": 19.76,
"learning_rate": 4.012027491408935e-05,
"loss": 1.1965,
"step": 23000
},
{
"epoch": 20.19,
"learning_rate": 3.9905498281786944e-05,
"loss": 1.184,
"step": 23500
},
{
"epoch": 20.62,
"learning_rate": 3.9690721649484535e-05,
"loss": 1.178,
"step": 24000
},
{
"epoch": 21.05,
"learning_rate": 3.9475945017182134e-05,
"loss": 1.1801,
"step": 24500
},
{
"epoch": 21.48,
"learning_rate": 3.9261168384879725e-05,
"loss": 1.1661,
"step": 25000
},
{
"epoch": 21.91,
"learning_rate": 3.904639175257732e-05,
"loss": 1.1687,
"step": 25500
},
{
"epoch": 22.34,
"learning_rate": 3.8831615120274915e-05,
"loss": 1.1636,
"step": 26000
},
{
"epoch": 22.77,
"learning_rate": 3.861683848797251e-05,
"loss": 1.1578,
"step": 26500
},
{
"epoch": 23.2,
"learning_rate": 3.8402061855670104e-05,
"loss": 1.1456,
"step": 27000
},
{
"epoch": 23.63,
"learning_rate": 3.8187285223367696e-05,
"loss": 1.148,
"step": 27500
},
{
"epoch": 24.05,
"learning_rate": 3.7972508591065294e-05,
"loss": 1.1458,
"step": 28000
},
{
"epoch": 24.48,
"learning_rate": 3.7757731958762886e-05,
"loss": 1.1348,
"step": 28500
},
{
"epoch": 24.91,
"learning_rate": 3.7542955326460484e-05,
"loss": 1.1359,
"step": 29000
},
{
"epoch": 25.34,
"learning_rate": 3.7328178694158075e-05,
"loss": 1.1266,
"step": 29500
},
{
"epoch": 25.77,
"learning_rate": 3.7113402061855674e-05,
"loss": 1.1193,
"step": 30000
},
{
"epoch": 26.2,
"learning_rate": 3.689862542955327e-05,
"loss": 1.1201,
"step": 30500
},
{
"epoch": 26.63,
"learning_rate": 3.6683848797250856e-05,
"loss": 1.1177,
"step": 31000
},
{
"epoch": 27.06,
"learning_rate": 3.6469072164948455e-05,
"loss": 1.1102,
"step": 31500
},
{
"epoch": 27.49,
"learning_rate": 3.6254295532646046e-05,
"loss": 1.1038,
"step": 32000
},
{
"epoch": 27.92,
"learning_rate": 3.6039518900343644e-05,
"loss": 1.1086,
"step": 32500
},
{
"epoch": 28.35,
"learning_rate": 3.5824742268041236e-05,
"loss": 1.0972,
"step": 33000
},
{
"epoch": 28.78,
"learning_rate": 3.5609965635738834e-05,
"loss": 1.1021,
"step": 33500
},
{
"epoch": 29.21,
"learning_rate": 3.539518900343643e-05,
"loss": 1.0974,
"step": 34000
},
{
"epoch": 29.64,
"learning_rate": 3.5180412371134024e-05,
"loss": 1.0839,
"step": 34500
},
{
"epoch": 30.07,
"learning_rate": 3.4965635738831615e-05,
"loss": 1.0918,
"step": 35000
},
{
"epoch": 30.5,
"learning_rate": 3.475085910652921e-05,
"loss": 1.0809,
"step": 35500
},
{
"epoch": 30.93,
"learning_rate": 3.4536082474226805e-05,
"loss": 1.0789,
"step": 36000
},
{
"epoch": 31.36,
"learning_rate": 3.43213058419244e-05,
"loss": 1.0741,
"step": 36500
},
{
"epoch": 31.79,
"learning_rate": 3.4106529209621995e-05,
"loss": 1.0701,
"step": 37000
},
{
"epoch": 32.22,
"learning_rate": 3.389175257731959e-05,
"loss": 1.0648,
"step": 37500
},
{
"epoch": 32.65,
"learning_rate": 3.3676975945017185e-05,
"loss": 1.0627,
"step": 38000
},
{
"epoch": 33.08,
"learning_rate": 3.3462199312714776e-05,
"loss": 1.0659,
"step": 38500
},
{
"epoch": 33.51,
"learning_rate": 3.3247422680412374e-05,
"loss": 1.05,
"step": 39000
},
{
"epoch": 33.93,
"learning_rate": 3.3032646048109966e-05,
"loss": 1.0496,
"step": 39500
},
{
"epoch": 34.36,
"learning_rate": 3.2817869415807564e-05,
"loss": 1.0472,
"step": 40000
},
{
"epoch": 34.79,
"learning_rate": 3.2603092783505155e-05,
"loss": 1.0439,
"step": 40500
},
{
"epoch": 35.22,
"learning_rate": 3.2388316151202754e-05,
"loss": 1.0453,
"step": 41000
},
{
"epoch": 35.65,
"learning_rate": 3.2173539518900345e-05,
"loss": 1.0418,
"step": 41500
},
{
"epoch": 36.08,
"learning_rate": 3.1958762886597937e-05,
"loss": 1.0342,
"step": 42000
},
{
"epoch": 36.51,
"learning_rate": 3.1743986254295535e-05,
"loss": 1.0258,
"step": 42500
},
{
"epoch": 36.94,
"learning_rate": 3.1529209621993126e-05,
"loss": 1.0319,
"step": 43000
},
{
"epoch": 37.37,
"learning_rate": 3.1314432989690725e-05,
"loss": 1.0238,
"step": 43500
},
{
"epoch": 37.8,
"learning_rate": 3.1099656357388316e-05,
"loss": 1.0274,
"step": 44000
},
{
"epoch": 38.23,
"learning_rate": 3.0884879725085914e-05,
"loss": 1.0233,
"step": 44500
},
{
"epoch": 38.66,
"learning_rate": 3.0670103092783506e-05,
"loss": 1.0209,
"step": 45000
},
{
"epoch": 39.09,
"learning_rate": 3.0455326460481097e-05,
"loss": 1.0164,
"step": 45500
},
{
"epoch": 39.52,
"learning_rate": 3.0240549828178692e-05,
"loss": 1.0027,
"step": 46000
},
{
"epoch": 39.95,
"learning_rate": 3.002577319587629e-05,
"loss": 1.0168,
"step": 46500
},
{
"epoch": 40.38,
"learning_rate": 2.9810996563573885e-05,
"loss": 1.0003,
"step": 47000
},
{
"epoch": 40.81,
"learning_rate": 2.959621993127148e-05,
"loss": 1.0067,
"step": 47500
},
{
"epoch": 41.24,
"learning_rate": 2.9381443298969075e-05,
"loss": 0.9941,
"step": 48000
},
{
"epoch": 41.67,
"learning_rate": 2.916666666666667e-05,
"loss": 1.0014,
"step": 48500
},
{
"epoch": 42.1,
"learning_rate": 2.8951890034364265e-05,
"loss": 0.9963,
"step": 49000
},
{
"epoch": 42.53,
"learning_rate": 2.8737113402061856e-05,
"loss": 0.9899,
"step": 49500
},
{
"epoch": 42.96,
"learning_rate": 2.852233676975945e-05,
"loss": 0.9882,
"step": 50000
},
{
"epoch": 43.38,
"learning_rate": 2.8307560137457046e-05,
"loss": 0.9832,
"step": 50500
},
{
"epoch": 43.81,
"learning_rate": 2.809278350515464e-05,
"loss": 0.9817,
"step": 51000
},
{
"epoch": 44.24,
"learning_rate": 2.7878006872852236e-05,
"loss": 0.9816,
"step": 51500
},
{
"epoch": 44.67,
"learning_rate": 2.766323024054983e-05,
"loss": 0.9795,
"step": 52000
},
{
"epoch": 45.1,
"learning_rate": 2.7448453608247425e-05,
"loss": 0.9737,
"step": 52500
},
{
"epoch": 45.53,
"learning_rate": 2.7233676975945017e-05,
"loss": 0.9693,
"step": 53000
},
{
"epoch": 45.96,
"learning_rate": 2.701890034364261e-05,
"loss": 0.9735,
"step": 53500
}
],
"max_steps": 116400,
"num_train_epochs": 100,
"total_flos": 4.504557638270131e+17,
"trial_name": null,
"trial_params": null
}