ppo-Huggy / run_logs / training_status.json
first trained Huggy model (6a6e40e)
{
    "Huggy": {
        "checkpoints": [
            {
                "steps": 199832,
                "file_path": "results/Huggy/Huggy/Huggy-199832.onnx",
                "reward": 3.121102605995379,
                "creation_time": 1703932389.1494014,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-199832.pt"
                ]
            },
            {
                "steps": 399884,
                "file_path": "results/Huggy/Huggy/Huggy-399884.onnx",
                "reward": 3.8856379939060584,
                "creation_time": 1703932695.0876338,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-399884.pt"
                ]
            },
            {
                "steps": 599938,
                "file_path": "results/Huggy/Huggy/Huggy-599938.onnx",
                "reward": 3.800588830312093,
                "creation_time": 1703933010.1747403,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-599938.pt"
                ]
            },
            {
                "steps": 799868,
                "file_path": "results/Huggy/Huggy/Huggy-799868.onnx",
                "reward": 3.7686460192440547,
                "creation_time": 1703933317.3878238,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-799868.pt"
                ]
            },
            {
                "steps": 999986,
                "file_path": "results/Huggy/Huggy/Huggy-999986.onnx",
                "reward": 3.4822133013180325,
                "creation_time": 1703933632.0346224,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-999986.pt"
                ]
            },
            {
                "steps": 1199938,
                "file_path": "results/Huggy/Huggy/Huggy-1199938.onnx",
                "reward": 3.521691937930882,
                "creation_time": 1703933947.0405598,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-1199938.pt"
                ]
            },
            {
                "steps": 1399997,
                "file_path": "results/Huggy/Huggy/Huggy-1399997.onnx",
                "reward": 3.8150465203597483,
                "creation_time": 1703934257.828833,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-1399997.pt"
                ]
            },
            {
                "steps": 1599992,
                "file_path": "results/Huggy/Huggy/Huggy-1599992.onnx",
                "reward": 3.7630314372288876,
                "creation_time": 1703934570.8867824,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-1599992.pt"
                ]
            },
            {
                "steps": 1799872,
                "file_path": "results/Huggy/Huggy/Huggy-1799872.onnx",
                "reward": 3.8861979266931845,
                "creation_time": 1703934881.8187938,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-1799872.pt"
                ]
            },
            {
                "steps": 1999977,
                "file_path": "results/Huggy/Huggy/Huggy-1999977.onnx",
                "reward": 3.501771330833435,
                "creation_time": 1703935196.5698686,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-1999977.pt"
                ]
            },
            {
                "steps": 2000045,
                "file_path": "results/Huggy/Huggy/Huggy-2000045.onnx",
                "reward": 3.4906200245022774,
                "creation_time": 1703935197.0526142,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-2000045.pt"
                ]
            }
        ],
        "final_checkpoint": {
            "steps": 2000045,
            "file_path": "results/Huggy/Huggy.onnx",
            "reward": 3.4906200245022774,
            "creation_time": 1703935197.0526142,
            "auxillary_file_paths": [
                "results/Huggy/Huggy/Huggy-2000045.pt"
            ]
        }
    },
    "metadata": {
        "stats_format_version": "0.3.0",
        "mlagents_version": "1.1.0.dev0",
        "torch_version": "2.1.2+cu121"
    }
}
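For reference, a minimal sketch of how this log could be inspected to compare checkpoints by mean reward. The relative file path below is an assumption about where the file sits when the repository is checked out; it is not part of the training run itself.

```python
import json

# Illustrative sketch (assumed path): load ML-Agents' training_status.json
# and report the checkpoint with the highest recorded mean reward.
with open("run_logs/training_status.json") as f:
    status = json.load(f)

checkpoints = status["Huggy"]["checkpoints"]
best = max(checkpoints, key=lambda c: c["reward"])

print(f"best checkpoint: {best['file_path']} "
      f"(reward {best['reward']:.3f} at step {best['steps']})")
print(f"final checkpoint: {status['Huggy']['final_checkpoint']['file_path']}")
```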