{
"best_metric": null,
"best_model_checkpoint": null,
"epoch": 0.49419322955275513,
"eval_steps": 500,
"global_step": 500,
"is_hyper_param_search": false,
"is_local_process_zero": true,
"is_world_process_zero": true,
"log_history": [
{
"epoch": 0.01,
"grad_norm": 1.2311009168624878,
"learning_rate": 5.6012058970266934e-05,
"loss": 1.6789,
"max_memory_allocated (GB)": 91.88,
"memory_allocated (GB)": 14.99,
"step": 10,
"total_memory_available (GB)": 94.62
},
{
"epoch": 0.02,
"grad_norm": 1.4872009754180908,
"learning_rate": 7.287336883921704e-05,
"loss": 1.3884,
"max_memory_allocated (GB)": 91.88,
"memory_allocated (GB)": 14.99,
"step": 20,
"total_memory_available (GB)": 94.62
},
{
"epoch": 0.03,
"grad_norm": 0.7868104577064514,
"learning_rate": 8.273660282559241e-05,
"loss": 1.2404,
"max_memory_allocated (GB)": 91.88,
"memory_allocated (GB)": 14.99,
"step": 30,
"total_memory_available (GB)": 94.62
},
{
"epoch": 0.04,
"grad_norm": 0.35713163018226624,
"learning_rate": 8.973467870816715e-05,
"loss": 1.2036,
"max_memory_allocated (GB)": 91.88,
"memory_allocated (GB)": 14.99,
"step": 40,
"total_memory_available (GB)": 94.62
},
{
"epoch": 0.05,
"grad_norm": 0.3057297468185425,
"learning_rate": 9.516280807158375e-05,
"loss": 1.1788,
"max_memory_allocated (GB)": 91.88,
"memory_allocated (GB)": 14.99,
"step": 50,
"total_memory_available (GB)": 94.62
},
{
"epoch": 0.06,
"grad_norm": 0.3608625531196594,
"learning_rate": 9.959791269454252e-05,
"loss": 1.1525,
"max_memory_allocated (GB)": 91.88,
"memory_allocated (GB)": 14.99,
"step": 60,
"total_memory_available (GB)": 94.62
},
{
"epoch": 0.07,
"grad_norm": 0.3684042990207672,
"learning_rate": 9.959204487506375e-05,
"loss": 1.1261,
"max_memory_allocated (GB)": 91.88,
"memory_allocated (GB)": 14.99,
"step": 70,
"total_memory_available (GB)": 94.62
},
{
"epoch": 0.08,
"grad_norm": 0.4175470471382141,
"learning_rate": 9.908210096889343e-05,
"loss": 1.1214,
"max_memory_allocated (GB)": 91.88,
"memory_allocated (GB)": 14.99,
"step": 80,
"total_memory_available (GB)": 94.62
},
{
"epoch": 0.09,
"grad_norm": 0.4869045317173004,
"learning_rate": 9.85721570627231e-05,
"loss": 1.1043,
"max_memory_allocated (GB)": 91.88,
"memory_allocated (GB)": 14.99,
"step": 90,
"total_memory_available (GB)": 94.62
},
{
"epoch": 0.1,
"grad_norm": 0.6399329900741577,
"learning_rate": 9.806221315655279e-05,
"loss": 1.1059,
"max_memory_allocated (GB)": 91.88,
"memory_allocated (GB)": 14.99,
"step": 100,
"total_memory_available (GB)": 94.62
},
{
"epoch": 0.11,
"grad_norm": 0.5639649033546448,
"learning_rate": 9.755226925038246e-05,
"loss": 1.0711,
"max_memory_allocated (GB)": 91.91,
"memory_allocated (GB)": 14.99,
"step": 110,
"total_memory_available (GB)": 94.62
},
{
"epoch": 0.12,
"grad_norm": 0.44946518540382385,
"learning_rate": 9.704232534421214e-05,
"loss": 1.0644,
"max_memory_allocated (GB)": 91.91,
"memory_allocated (GB)": 14.99,
"step": 120,
"total_memory_available (GB)": 94.62
},
{
"epoch": 0.13,
"grad_norm": 0.5573060512542725,
"learning_rate": 9.653238143804181e-05,
"loss": 1.0634,
"max_memory_allocated (GB)": 91.92,
"memory_allocated (GB)": 14.99,
"step": 130,
"total_memory_available (GB)": 94.62
},
{
"epoch": 0.14,
"grad_norm": 0.6105266213417053,
"learning_rate": 9.60224375318715e-05,
"loss": 1.0597,
"max_memory_allocated (GB)": 91.92,
"memory_allocated (GB)": 14.99,
"step": 140,
"total_memory_available (GB)": 94.62
},
{
"epoch": 0.15,
"grad_norm": 0.6286391615867615,
"learning_rate": 9.551249362570118e-05,
"loss": 1.0528,
"max_memory_allocated (GB)": 91.92,
"memory_allocated (GB)": 14.99,
"step": 150,
"total_memory_available (GB)": 94.62
},
{
"epoch": 0.16,
"grad_norm": 0.8733624815940857,
"learning_rate": 9.500254971953085e-05,
"loss": 1.0524,
"max_memory_allocated (GB)": 91.93,
"memory_allocated (GB)": 14.99,
"step": 160,
"total_memory_available (GB)": 94.62
},
{
"epoch": 0.17,
"grad_norm": 0.6268635392189026,
"learning_rate": 9.449260581336054e-05,
"loss": 1.0345,
"max_memory_allocated (GB)": 91.93,
"memory_allocated (GB)": 14.99,
"step": 170,
"total_memory_available (GB)": 94.62
},
{
"epoch": 0.18,
"grad_norm": 0.5832647681236267,
"learning_rate": 9.398266190719021e-05,
"loss": 1.0262,
"max_memory_allocated (GB)": 91.93,
"memory_allocated (GB)": 14.99,
"step": 180,
"total_memory_available (GB)": 94.62
},
{
"epoch": 0.19,
"grad_norm": 0.6518144011497498,
"learning_rate": 9.347271800101989e-05,
"loss": 1.0318,
"max_memory_allocated (GB)": 91.93,
"memory_allocated (GB)": 14.99,
"step": 190,
"total_memory_available (GB)": 94.62
},
{
"epoch": 0.2,
"grad_norm": 0.49274373054504395,
"learning_rate": 9.296277409484956e-05,
"loss": 1.0298,
"max_memory_allocated (GB)": 91.93,
"memory_allocated (GB)": 14.99,
"step": 200,
"total_memory_available (GB)": 94.62
},
{
"epoch": 0.21,
"grad_norm": 0.5237769484519958,
"learning_rate": 9.245283018867925e-05,
"loss": 1.0176,
"max_memory_allocated (GB)": 91.93,
"memory_allocated (GB)": 14.99,
"step": 210,
"total_memory_available (GB)": 94.62
},
{
"epoch": 0.22,
"grad_norm": 0.564319372177124,
"learning_rate": 9.194288628250894e-05,
"loss": 1.0172,
"max_memory_allocated (GB)": 91.93,
"memory_allocated (GB)": 14.99,
"step": 220,
"total_memory_available (GB)": 94.62
},
{
"epoch": 0.23,
"grad_norm": 0.4697343111038208,
"learning_rate": 9.14329423763386e-05,
"loss": 1.0262,
"max_memory_allocated (GB)": 91.93,
"memory_allocated (GB)": 14.99,
"step": 230,
"total_memory_available (GB)": 94.62
},
{
"epoch": 0.24,
"grad_norm": 0.5207454562187195,
"learning_rate": 9.092299847016829e-05,
"loss": 1.024,
"max_memory_allocated (GB)": 91.93,
"memory_allocated (GB)": 14.99,
"step": 240,
"total_memory_available (GB)": 94.62
},
{
"epoch": 0.25,
"grad_norm": 0.4637609124183655,
"learning_rate": 9.041305456399796e-05,
"loss": 1.0069,
"max_memory_allocated (GB)": 91.93,
"memory_allocated (GB)": 14.99,
"step": 250,
"total_memory_available (GB)": 94.62
},
{
"epoch": 0.26,
"grad_norm": 0.47436627745628357,
"learning_rate": 8.990311065782764e-05,
"loss": 1.0119,
"max_memory_allocated (GB)": 91.93,
"memory_allocated (GB)": 14.99,
"step": 260,
"total_memory_available (GB)": 94.62
},
{
"epoch": 0.27,
"grad_norm": 0.5096576809883118,
"learning_rate": 8.939316675165733e-05,
"loss": 1.0092,
"max_memory_allocated (GB)": 91.93,
"memory_allocated (GB)": 14.99,
"step": 270,
"total_memory_available (GB)": 94.62
},
{
"epoch": 0.28,
"grad_norm": 0.5780492424964905,
"learning_rate": 8.8883222845487e-05,
"loss": 1.0082,
"max_memory_allocated (GB)": 91.93,
"memory_allocated (GB)": 14.99,
"step": 280,
"total_memory_available (GB)": 94.62
},
{
"epoch": 0.29,
"grad_norm": 0.4528846740722656,
"learning_rate": 8.837327893931669e-05,
"loss": 1.0057,
"max_memory_allocated (GB)": 91.93,
"memory_allocated (GB)": 14.99,
"step": 290,
"total_memory_available (GB)": 94.62
},
{
"epoch": 0.3,
"grad_norm": 0.5259899497032166,
"learning_rate": 8.786333503314635e-05,
"loss": 0.9989,
"max_memory_allocated (GB)": 91.93,
"memory_allocated (GB)": 14.99,
"step": 300,
"total_memory_available (GB)": 94.62
},
{
"epoch": 0.31,
"grad_norm": 0.5007658004760742,
"learning_rate": 8.735339112697604e-05,
"loss": 0.9997,
"max_memory_allocated (GB)": 91.93,
"memory_allocated (GB)": 14.99,
"step": 310,
"total_memory_available (GB)": 94.62
},
{
"epoch": 0.32,
"grad_norm": 0.4527484178543091,
"learning_rate": 8.684344722080571e-05,
"loss": 0.9949,
"max_memory_allocated (GB)": 91.93,
"memory_allocated (GB)": 14.99,
"step": 320,
"total_memory_available (GB)": 94.62
},
{
"epoch": 0.33,
"grad_norm": 0.4365575611591339,
"learning_rate": 8.633350331463539e-05,
"loss": 0.9943,
"max_memory_allocated (GB)": 91.93,
"memory_allocated (GB)": 14.99,
"step": 330,
"total_memory_available (GB)": 94.62
},
{
"epoch": 0.34,
"grad_norm": 0.49428853392601013,
"learning_rate": 8.582355940846507e-05,
"loss": 1.0005,
"max_memory_allocated (GB)": 91.93,
"memory_allocated (GB)": 14.99,
"step": 340,
"total_memory_available (GB)": 94.62
},
{
"epoch": 0.35,
"grad_norm": 0.4611368775367737,
"learning_rate": 8.531361550229475e-05,
"loss": 0.9972,
"max_memory_allocated (GB)": 91.93,
"memory_allocated (GB)": 14.99,
"step": 350,
"total_memory_available (GB)": 94.62
},
{
"epoch": 0.36,
"grad_norm": 0.43548157811164856,
"learning_rate": 8.480367159612444e-05,
"loss": 0.9833,
"max_memory_allocated (GB)": 91.93,
"memory_allocated (GB)": 14.99,
"step": 360,
"total_memory_available (GB)": 94.62
},
{
"epoch": 0.37,
"grad_norm": 0.4797479808330536,
"learning_rate": 8.42937276899541e-05,
"loss": 0.981,
"max_memory_allocated (GB)": 91.97,
"memory_allocated (GB)": 14.99,
"step": 370,
"total_memory_available (GB)": 94.62
},
{
"epoch": 0.38,
"grad_norm": 0.44958415627479553,
"learning_rate": 8.378378378378379e-05,
"loss": 0.9969,
"max_memory_allocated (GB)": 91.97,
"memory_allocated (GB)": 14.99,
"step": 380,
"total_memory_available (GB)": 94.62
},
{
"epoch": 0.39,
"grad_norm": 0.4499351680278778,
"learning_rate": 8.327383987761347e-05,
"loss": 0.9847,
"max_memory_allocated (GB)": 91.97,
"memory_allocated (GB)": 14.99,
"step": 390,
"total_memory_available (GB)": 94.62
},
{
"epoch": 0.4,
"grad_norm": 0.45021358132362366,
"learning_rate": 8.276389597144315e-05,
"loss": 0.9874,
"max_memory_allocated (GB)": 91.97,
"memory_allocated (GB)": 14.99,
"step": 400,
"total_memory_available (GB)": 94.62
},
{
"epoch": 0.41,
"grad_norm": 0.4754478335380554,
"learning_rate": 8.225395206527282e-05,
"loss": 0.9955,
"max_memory_allocated (GB)": 91.97,
"memory_allocated (GB)": 14.99,
"step": 410,
"total_memory_available (GB)": 94.62
},
{
"epoch": 0.42,
"grad_norm": 0.44393980503082275,
"learning_rate": 8.17440081591025e-05,
"loss": 0.9898,
"max_memory_allocated (GB)": 91.97,
"memory_allocated (GB)": 14.99,
"step": 420,
"total_memory_available (GB)": 94.62
},
{
"epoch": 0.43,
"grad_norm": 0.43429532647132874,
"learning_rate": 8.123406425293219e-05,
"loss": 0.9905,
"max_memory_allocated (GB)": 91.97,
"memory_allocated (GB)": 14.99,
"step": 430,
"total_memory_available (GB)": 94.62
},
{
"epoch": 0.43,
"grad_norm": 0.4695710837841034,
"learning_rate": 8.072412034676186e-05,
"loss": 0.9702,
"max_memory_allocated (GB)": 91.97,
"memory_allocated (GB)": 14.99,
"step": 440,
"total_memory_available (GB)": 94.62
},
{
"epoch": 0.44,
"grad_norm": 0.40997833013534546,
"learning_rate": 8.021417644059154e-05,
"loss": 0.9825,
"max_memory_allocated (GB)": 91.97,
"memory_allocated (GB)": 14.99,
"step": 450,
"total_memory_available (GB)": 94.62
},
{
"epoch": 0.45,
"grad_norm": 0.4330343008041382,
"learning_rate": 7.970423253442122e-05,
"loss": 0.9777,
"max_memory_allocated (GB)": 91.97,
"memory_allocated (GB)": 14.99,
"step": 460,
"total_memory_available (GB)": 94.62
},
{
"epoch": 0.46,
"grad_norm": 0.42674386501312256,
"learning_rate": 7.91942886282509e-05,
"loss": 0.9794,
"max_memory_allocated (GB)": 91.97,
"memory_allocated (GB)": 14.99,
"step": 470,
"total_memory_available (GB)": 94.62
},
{
"epoch": 0.47,
"grad_norm": 0.4461188316345215,
"learning_rate": 7.868434472208057e-05,
"loss": 0.979,
"max_memory_allocated (GB)": 91.97,
"memory_allocated (GB)": 14.99,
"step": 480,
"total_memory_available (GB)": 94.62
},
{
"epoch": 0.48,
"grad_norm": 0.4532679617404938,
"learning_rate": 7.817440081591025e-05,
"loss": 0.9764,
"max_memory_allocated (GB)": 91.97,
"memory_allocated (GB)": 14.99,
"step": 490,
"total_memory_available (GB)": 94.62
},
{
"epoch": 0.49,
"grad_norm": 0.42160096764564514,
"learning_rate": 7.766445690973994e-05,
"loss": 0.967,
"max_memory_allocated (GB)": 91.97,
"memory_allocated (GB)": 14.99,
"step": 500,
"total_memory_available (GB)": 94.62
}
],
"logging_steps": 10,
"max_steps": 2022,
"num_input_tokens_seen": 0,
"num_train_epochs": 2,
"save_steps": 500,
"total_flos": 5.597410974551245e+18,
"train_batch_size": 8,
"trial_name": null,
"trial_params": null
}