{
"Huggy": {
"checkpoints": [
{
"steps": 199853,
"file_path": "results/Huggy/Huggy/Huggy-199853.onnx",
"reward": 3.7373219834906712,
"creation_time": 1701033438.5574615,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-199853.pt"
]
},
{
"steps": 399962,
"file_path": "results/Huggy/Huggy/Huggy-399962.onnx",
"reward": 4.16405305029854,
"creation_time": 1701033679.906854,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-399962.pt"
]
},
{
"steps": 599977,
"file_path": "results/Huggy/Huggy/Huggy-599977.onnx",
"reward": 3.86000634431839,
"creation_time": 1701033922.8393683,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-599977.pt"
]
},
{
"steps": 799961,
"file_path": "results/Huggy/Huggy/Huggy-799961.onnx",
"reward": 3.861483975802318,
"creation_time": 1701034162.1861637,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-799961.pt"
]
},
{
"steps": 999933,
"file_path": "results/Huggy/Huggy/Huggy-999933.onnx",
"reward": 3.7270862823113418,
"creation_time": 1701034405.6825724,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-999933.pt"
]
},
{
"steps": 1199970,
"file_path": "results/Huggy/Huggy/Huggy-1199970.onnx",
"reward": 3.8894913509870186,
"creation_time": 1701034651.2347665,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1199970.pt"
]
},
{
"steps": 1399976,
"file_path": "results/Huggy/Huggy/Huggy-1399976.onnx",
"reward": 4.587229549884796,
"creation_time": 1701034899.1946588,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1399976.pt"
]
},
{
"steps": 1599397,
"file_path": "results/Huggy/Huggy/Huggy-1599397.onnx",
"reward": 3.712484639227105,
"creation_time": 1701035142.8950663,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1599397.pt"
]
},
{
"steps": 1799956,
"file_path": "results/Huggy/Huggy/Huggy-1799956.onnx",
"reward": 4.0866402345937445,
"creation_time": 1701035391.2373965,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1799956.pt"
]
},
{
"steps": 1999973,
"file_path": "results/Huggy/Huggy/Huggy-1999973.onnx",
"reward": 3.29299092818709,
"creation_time": 1701035645.1960187,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1999973.pt"
]
},
{
"steps": 2000038,
"file_path": "results/Huggy/Huggy/Huggy-2000038.onnx",
"reward": 3.2873713987461035,
"creation_time": 1701035645.2975018,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-2000038.pt"
]
}
],
"final_checkpoint": {
"steps": 2000038,
"file_path": "results/Huggy/Huggy.onnx",
"reward": 3.2873713987461035,
"creation_time": 1701035645.2975018,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-2000038.pt"
]
}
},
"metadata": {
"stats_format_version": "0.3.0",
"mlagents_version": "1.1.0.dev0",
"torch_version": "2.1.1+cu121"
}
}