{
"Huggy": {
"checkpoints": [
{
"steps": 199909,
"file_path": "results/Huggy/Huggy/Huggy-199909.onnx",
"reward": 3.44622627000014,
"creation_time": 1700926255.8209898,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-199909.pt"
]
},
{
"steps": 399945,
"file_path": "results/Huggy/Huggy/Huggy-399945.onnx",
"reward": 3.6717693056121017,
"creation_time": 1700926535.1225116,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-399945.pt"
]
},
{
"steps": 599873,
"file_path": "results/Huggy/Huggy/Huggy-599873.onnx",
"reward": 3.6860159120776435,
"creation_time": 1700926817.8359349,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-599873.pt"
]
},
{
"steps": 799967,
"file_path": "results/Huggy/Huggy/Huggy-799967.onnx",
"reward": 4.04901224346115,
"creation_time": 1700927102.633436,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-799967.pt"
]
},
{
"steps": 999838,
"file_path": "results/Huggy/Huggy/Huggy-999838.onnx",
"reward": 3.875352392457936,
"creation_time": 1700927388.702098,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-999838.pt"
]
},
{
"steps": 1199967,
"file_path": "results/Huggy/Huggy/Huggy-1199967.onnx",
"reward": 3.753776306975378,
"creation_time": 1700927677.7097387,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1199967.pt"
]
},
{
"steps": 1399938,
"file_path": "results/Huggy/Huggy/Huggy-1399938.onnx",
"reward": 5.1511918067932125,
"creation_time": 1700927967.385668,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1399938.pt"
]
},
{
"steps": 1599993,
"file_path": "results/Huggy/Huggy/Huggy-1599993.onnx",
"reward": 3.8298877055446305,
"creation_time": 1700928250.9163933,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1599993.pt"
]
},
{
"steps": 1799996,
"file_path": "results/Huggy/Huggy/Huggy-1799996.onnx",
"reward": 3.916596708695094,
"creation_time": 1700928536.4825776,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1799996.pt"
]
},
{
"steps": 1999984,
"file_path": "results/Huggy/Huggy/Huggy-1999984.onnx",
"reward": 3.221725859949666,
"creation_time": 1700928825.7602067,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1999984.pt"
]
},
{
"steps": 2000036,
"file_path": "results/Huggy/Huggy/Huggy-2000036.onnx",
"reward": 3.2206712029874325,
"creation_time": 1700928825.8767865,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-2000036.pt"
]
}
],
"final_checkpoint": {
"steps": 2000036,
"file_path": "results/Huggy/Huggy.onnx",
"reward": 3.2206712029874325,
"creation_time": 1700928825.8767865,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-2000036.pt"
]
}
},
"metadata": {
"stats_format_version": "0.3.0",
"mlagents_version": "1.1.0.dev0",
"torch_version": "2.1.0+cu118"
}
}