{
"Huggy": {
"checkpoints": [
{
"steps": 199642,
"file_path": "results/Huggy/Huggy/Huggy-199642.onnx",
"reward": 3.3408852869814094,
"creation_time": 1696951351.066657,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-199642.pt"
]
},
{
"steps": 399995,
"file_path": "results/Huggy/Huggy/Huggy-399995.onnx",
"reward": 3.8581132473129975,
"creation_time": 1696951588.3978288,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-399995.pt"
]
},
{
"steps": 599953,
"file_path": "results/Huggy/Huggy/Huggy-599953.onnx",
"reward": 3.8165545654296875,
"creation_time": 1696951829.5304844,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-599953.pt"
]
},
{
"steps": 799938,
"file_path": "results/Huggy/Huggy/Huggy-799938.onnx",
"reward": 3.9907866439548147,
"creation_time": 1696952068.3049042,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-799938.pt"
]
},
{
"steps": 999969,
"file_path": "results/Huggy/Huggy/Huggy-999969.onnx",
"reward": 3.9267869674798215,
"creation_time": 1696952309.4688065,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-999969.pt"
]
},
{
"steps": 1199991,
"file_path": "results/Huggy/Huggy/Huggy-1199991.onnx",
"reward": 4.183989282264266,
"creation_time": 1696952553.7304053,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1199991.pt"
]
},
{
"steps": 1399993,
"file_path": "results/Huggy/Huggy/Huggy-1399993.onnx",
"reward": 3.392833543320497,
"creation_time": 1696952800.2592373,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1399993.pt"
]
},
{
"steps": 1599947,
"file_path": "results/Huggy/Huggy/Huggy-1599947.onnx",
"reward": 3.9308800111340068,
"creation_time": 1696953041.6891658,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1599947.pt"
]
},
{
"steps": 1799952,
"file_path": "results/Huggy/Huggy/Huggy-1799952.onnx",
"reward": 3.958530896546825,
"creation_time": 1696953286.6476877,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1799952.pt"
]
},
{
"steps": 1999979,
"file_path": "results/Huggy/Huggy/Huggy-1999979.onnx",
"reward": 4.117876688171836,
"creation_time": 1696953531.0502884,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1999979.pt"
]
},
{
"steps": 2000044,
"file_path": "results/Huggy/Huggy/Huggy-2000044.onnx",
"reward": 4.121083632458088,
"creation_time": 1696953531.1514134,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-2000044.pt"
]
}
],
"final_checkpoint": {
"steps": 2000044,
"file_path": "results/Huggy/Huggy.onnx",
"reward": 4.121083632458088,
"creation_time": 1696953531.1514134,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-2000044.pt"
]
}
},
"metadata": {
"stats_format_version": "0.3.0",
"mlagents_version": "1.1.0.dev0",
"torch_version": "2.0.1+cu118"
}
}