{
"Huggy": {
"checkpoints": [
{
"steps": 199981,
"file_path": "results/Huggy/Huggy/Huggy-199981.onnx",
"reward": 3.331180773564239,
"creation_time": 1679002745.4364514,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-199981.pt"
]
},
{
"steps": 399833,
"file_path": "results/Huggy/Huggy/Huggy-399833.onnx",
"reward": 4.120620560646057,
"creation_time": 1679002969.9215589,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-399833.pt"
]
},
{
"steps": 599874,
"file_path": "results/Huggy/Huggy/Huggy-599874.onnx",
"reward": 3.798865884542465,
"creation_time": 1679003198.0187047,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-599874.pt"
]
},
{
"steps": 799995,
"file_path": "results/Huggy/Huggy/Huggy-799995.onnx",
"reward": 3.7538366065943305,
"creation_time": 1679003428.602358,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-799995.pt"
]
},
{
"steps": 999934,
"file_path": "results/Huggy/Huggy/Huggy-999934.onnx",
"reward": 4.058848343158172,
"creation_time": 1679003660.2727952,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-999934.pt"
]
},
{
"steps": 1199988,
"file_path": "results/Huggy/Huggy/Huggy-1199988.onnx",
"reward": 3.8248371926831526,
"creation_time": 1679003894.0374503,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1199988.pt"
]
},
{
"steps": 1399882,
"file_path": "results/Huggy/Huggy/Huggy-1399882.onnx",
"reward": 4.15095487007728,
"creation_time": 1679004124.249188,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1399882.pt"
]
},
{
"steps": 1599959,
"file_path": "results/Huggy/Huggy/Huggy-1599959.onnx",
"reward": 3.8458386596137957,
"creation_time": 1679004349.73608,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1599959.pt"
]
},
{
"steps": 1799954,
"file_path": "results/Huggy/Huggy/Huggy-1799954.onnx",
"reward": 3.5469986470704225,
"creation_time": 1679004583.5459344,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1799954.pt"
]
},
{
"steps": 1999487,
"file_path": "results/Huggy/Huggy/Huggy-1999487.onnx",
"reward": 3.2186071437138777,
"creation_time": 1679004818.393559,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1999487.pt"
]
},
{
"steps": 2000237,
"file_path": "results/Huggy/Huggy/Huggy-2000237.onnx",
"reward": 2.9473287021672285,
"creation_time": 1679004818.5372157,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-2000237.pt"
]
}
],
"final_checkpoint": {
"steps": 2000237,
"file_path": "results/Huggy/Huggy.onnx",
"reward": 2.9473287021672285,
"creation_time": 1679004818.5372157,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-2000237.pt"
]
}
},
"metadata": {
"stats_format_version": "0.3.0",
"mlagents_version": "0.31.0.dev0",
"torch_version": "1.11.0+cu102"
}
}