{
"Huggy": {
"checkpoints": [
{
"steps": 199915,
"file_path": "results/Huggy/Huggy/Huggy-199915.onnx",
"reward": 3.581363221346322,
"creation_time": 1675445473.1648016,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-199915.pt"
]
},
{
"steps": 399942,
"file_path": "results/Huggy/Huggy/Huggy-399942.onnx",
"reward": 3.7599378313337053,
"creation_time": 1675445687.5047703,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-399942.pt"
]
},
{
"steps": 599858,
"file_path": "results/Huggy/Huggy/Huggy-599858.onnx",
"reward": 3.795403833742495,
"creation_time": 1675445904.3964446,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-599858.pt"
]
},
{
"steps": 799976,
"file_path": "results/Huggy/Huggy/Huggy-799976.onnx",
"reward": 3.5539253340370354,
"creation_time": 1675446118.8896747,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-799976.pt"
]
},
{
"steps": 999978,
"file_path": "results/Huggy/Huggy/Huggy-999978.onnx",
"reward": 3.7158552981514967,
"creation_time": 1675446336.2546399,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-999978.pt"
]
},
{
"steps": 1199950,
"file_path": "results/Huggy/Huggy/Huggy-1199950.onnx",
"reward": 3.728861411412557,
"creation_time": 1675446553.9575865,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1199950.pt"
]
},
{
"steps": 1399373,
"file_path": "results/Huggy/Huggy/Huggy-1399373.onnx",
"reward": 3.6681038321506594,
"creation_time": 1675446769.1194453,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1399373.pt"
]
},
{
"steps": 1599987,
"file_path": "results/Huggy/Huggy/Huggy-1599987.onnx",
"reward": 3.90781403084596,
"creation_time": 1675446989.1628337,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1599987.pt"
]
},
{
"steps": 1799933,
"file_path": "results/Huggy/Huggy/Huggy-1799933.onnx",
"reward": 3.5827843623053757,
"creation_time": 1675447209.625541,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1799933.pt"
]
},
{
"steps": 1999898,
"file_path": "results/Huggy/Huggy/Huggy-1999898.onnx",
"reward": 4.092546670777457,
"creation_time": 1675447428.1550438,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1999898.pt"
]
},
{
"steps": 2000015,
"file_path": "results/Huggy/Huggy/Huggy-2000015.onnx",
"reward": 4.162985725535287,
"creation_time": 1675447428.270449,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-2000015.pt"
]
}
],
"final_checkpoint": {
"steps": 2000015,
"file_path": "results/Huggy/Huggy.onnx",
"reward": 4.162985725535287,
"creation_time": 1675447428.270449,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-2000015.pt"
]
}
},
"metadata": {
"stats_format_version": "0.3.0",
"mlagents_version": "0.29.0.dev0",
"torch_version": "1.8.1+cu102"
}
}