ppo-Huggy / run_logs / training_status.json
{
    "Huggy": {
        "checkpoints": [
            {
                "steps": 199853,
                "file_path": "results/Huggy/Huggy/Huggy-199853.onnx",
                "reward": 3.6169405915520407,
                "creation_time": 1700796223.3843646,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-199853.pt"
                ]
            },
            {
                "steps": 399933,
                "file_path": "results/Huggy/Huggy/Huggy-399933.onnx",
                "reward": 3.6704936580998555,
                "creation_time": 1700796481.070088,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-399933.pt"
                ]
            },
            {
                "steps": 599988,
                "file_path": "results/Huggy/Huggy/Huggy-599988.onnx",
                "reward": 3.9635205439158847,
                "creation_time": 1700796741.7297533,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-599988.pt"
                ]
            },
            {
                "steps": 799951,
                "file_path": "results/Huggy/Huggy/Huggy-799951.onnx",
                "reward": 4.017343042602001,
                "creation_time": 1700797002.2506528,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-799951.pt"
                ]
            },
            {
                "steps": 999954,
                "file_path": "results/Huggy/Huggy/Huggy-999954.onnx",
                "reward": 3.966984901691507,
                "creation_time": 1700797276.503271,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-999954.pt"
                ]
            },
            {
                "steps": 1199910,
                "file_path": "results/Huggy/Huggy/Huggy-1199910.onnx",
                "reward": 3.8238223261303372,
                "creation_time": 1700797549.7457757,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-1199910.pt"
                ]
            },
            {
                "steps": 1399973,
                "file_path": "results/Huggy/Huggy/Huggy-1399973.onnx",
                "reward": 3.7827113218929456,
                "creation_time": 1700797825.670516,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-1399973.pt"
                ]
            },
            {
                "steps": 1599929,
                "file_path": "results/Huggy/Huggy/Huggy-1599929.onnx",
                "reward": 3.832686022420724,
                "creation_time": 1700798091.4855597,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-1599929.pt"
                ]
            },
            {
                "steps": 1799959,
                "file_path": "results/Huggy/Huggy/Huggy-1799959.onnx",
                "reward": 3.7395905123816595,
                "creation_time": 1700798361.4375384,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-1799959.pt"
                ]
            },
            {
                "steps": 1999937,
                "file_path": "results/Huggy/Huggy/Huggy-1999937.onnx",
                "reward": 4.009833451948668,
                "creation_time": 1700798631.2719288,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-1999937.pt"
                ]
            },
            {
                "steps": 2000043,
                "file_path": "results/Huggy/Huggy/Huggy-2000043.onnx",
                "reward": 4.003185646874564,
                "creation_time": 1700798631.374076,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-2000043.pt"
                ]
            }
        ],
        "final_checkpoint": {
            "steps": 2000043,
            "file_path": "results/Huggy/Huggy.onnx",
            "reward": 4.003185646874564,
            "creation_time": 1700798631.374076,
            "auxillary_file_paths": [
                "results/Huggy/Huggy/Huggy-2000043.pt"
            ]
        }
    },
    "metadata": {
        "stats_format_version": "0.3.0",
        "mlagents_version": "1.1.0.dev0",
        "torch_version": "2.1.0+cu118"
    }
}
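
For quick inspection, a minimal Python sketch of how one might read this file and list the recorded checkpoints is shown below. This is not part of the ML-Agents toolchain; the relative path is an assumption and should be adjusted to wherever the file actually lives.

```python
import json

# Minimal sketch: load training_status.json (assumed path) and print each
# recorded Huggy checkpoint plus the final exported policy.
with open("run_logs/training_status.json") as f:
    status = json.load(f)

huggy = status["Huggy"]
for ckpt in huggy["checkpoints"]:
    print(f'{ckpt["steps"]:>8} steps  reward={ckpt["reward"]:.3f}  {ckpt["file_path"]}')

final = huggy["final_checkpoint"]
print(f'final: {final["file_path"]}  reward={final["reward"]:.3f}')
```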