{
"Huggy": {
"checkpoints": [
{
"steps": 199863,
"file_path": "results/Huggy2/Huggy/Huggy-199863.onnx",
"reward": 3.1376120219491934,
"creation_time": 1729092451.3450522,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-199863.pt"
]
},
{
"steps": 399882,
"file_path": "results/Huggy2/Huggy/Huggy-399882.onnx",
"reward": 3.977807922609921,
"creation_time": 1729092690.3199093,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-399882.pt"
]
},
{
"steps": 599957,
"file_path": "results/Huggy2/Huggy/Huggy-599957.onnx",
"reward": 4.198198281801664,
"creation_time": 1729092922.6827192,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-599957.pt"
]
},
{
"steps": 799977,
"file_path": "results/Huggy2/Huggy/Huggy-799977.onnx",
"reward": 3.8662597142070174,
"creation_time": 1729093153.6456656,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-799977.pt"
]
},
{
"steps": 999960,
"file_path": "results/Huggy2/Huggy/Huggy-999960.onnx",
"reward": 3.791854295363793,
"creation_time": 1729093391.2285795,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-999960.pt"
]
},
{
"steps": 1199910,
"file_path": "results/Huggy2/Huggy/Huggy-1199910.onnx",
"reward": 3.367018124370864,
"creation_time": 1729093631.4740849,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1199910.pt"
]
},
{
"steps": 1399919,
"file_path": "results/Huggy2/Huggy/Huggy-1399919.onnx",
"reward": 3.843611240386963,
"creation_time": 1729093870.8248618,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1399919.pt"
]
},
{
"steps": 1599870,
"file_path": "results/Huggy2/Huggy/Huggy-1599870.onnx",
"reward": 3.901366401177186,
"creation_time": 1729094103.6042678,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1599870.pt"
]
},
{
"steps": 1799952,
"file_path": "results/Huggy2/Huggy/Huggy-1799952.onnx",
"reward": 3.842864220061999,
"creation_time": 1729094338.387891,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1799952.pt"
]
},
{
"steps": 1999947,
"file_path": "results/Huggy2/Huggy/Huggy-1999947.onnx",
"reward": 3.025953371077776,
"creation_time": 1729094578.4812696,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1999947.pt"
]
},
{
"steps": 2000028,
"file_path": "results/Huggy2/Huggy/Huggy-2000028.onnx",
"reward": 3.035334561810349,
"creation_time": 1729094578.59848,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-2000028.pt"
]
}
],
"final_checkpoint": {
"steps": 2000028,
"file_path": "results/Huggy2/Huggy.onnx",
"reward": 3.035334561810349,
"creation_time": 1729094578.59848,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-2000028.pt"
]
}
},
"metadata": {
"stats_format_version": "0.3.0",
"mlagents_version": "1.2.0.dev0",
"torch_version": "2.4.1+cu121"
}
}