{
"Huggy": {
"checkpoints": [
{
"steps": 199806,
"file_path": "results/Huggy/Huggy/Huggy-199806.onnx",
"reward": 3.4517753680547076,
"creation_time": 1679578034.9763367,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-199806.pt"
]
},
{
"steps": 399967,
"file_path": "results/Huggy/Huggy/Huggy-399967.onnx",
"reward": 3.2503503240756135,
"creation_time": 1679578292.9756367,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-399967.pt"
]
},
{
"steps": 599971,
"file_path": "results/Huggy/Huggy/Huggy-599971.onnx",
"reward": 3.8478473871946335,
"creation_time": 1679578570.9616416,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-599971.pt"
]
},
{
"steps": 799908,
"file_path": "results/Huggy/Huggy/Huggy-799908.onnx",
"reward": 3.6900924963453794,
"creation_time": 1679578846.2597647,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-799908.pt"
]
},
{
"steps": 999951,
"file_path": "results/Huggy/Huggy/Huggy-999951.onnx",
"reward": 3.7201429458998017,
"creation_time": 1679579126.23681,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-999951.pt"
]
},
{
"steps": 1199908,
"file_path": "results/Huggy/Huggy/Huggy-1199908.onnx",
"reward": 3.6715741070305428,
"creation_time": 1679579406.7769518,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1199908.pt"
]
},
{
"steps": 1399862,
"file_path": "results/Huggy/Huggy/Huggy-1399862.onnx",
"reward": 3.7815613010525704,
"creation_time": 1679579678.4398293,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1399862.pt"
]
},
{
"steps": 1599963,
"file_path": "results/Huggy/Huggy/Huggy-1599963.onnx",
"reward": 3.4691841876506806,
"creation_time": 1679579951.2063842,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1599963.pt"
]
},
{
"steps": 1799916,
"file_path": "results/Huggy/Huggy/Huggy-1799916.onnx",
"reward": 3.7666024017333983,
"creation_time": 1679580220.6197333,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1799916.pt"
]
},
{
"steps": 1999397,
"file_path": "results/Huggy/Huggy/Huggy-1999397.onnx",
"reward": 2.9913243452707925,
"creation_time": 1679580494.9543087,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1999397.pt"
]
},
{
"steps": 2000147,
"file_path": "results/Huggy/Huggy/Huggy-2000147.onnx",
"reward": 2.061979055404663,
"creation_time": 1679580495.1983304,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-2000147.pt"
]
}
],
"final_checkpoint": {
"steps": 2000147,
"file_path": "results/Huggy/Huggy.onnx",
"reward": 2.061979055404663,
"creation_time": 1679580495.1983304,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-2000147.pt"
]
}
},
"metadata": {
"stats_format_version": "0.3.0",
"mlagents_version": "0.31.0.dev0",
"torch_version": "1.11.0+cu102"
}
}