ppo-Huggy/run_logs/training_status.json
{
    "Huggy": {
        "checkpoints": [
            {
                "steps": 199965,
                "file_path": "results/Huggy/Huggy/Huggy-199965.onnx",
                "reward": 3.472507493537769,
                "creation_time": 1688876610.5335107,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-199965.pt"
                ]
            },
            {
                "steps": 399984,
                "file_path": "results/Huggy/Huggy/Huggy-399984.onnx",
                "reward": 3.5477325581014156,
                "creation_time": 1688876867.3630888,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-399984.pt"
                ]
            },
            {
                "steps": 599721,
                "file_path": "results/Huggy/Huggy/Huggy-599721.onnx",
                "reward": 4.277174052439238,
                "creation_time": 1688877129.4965496,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-599721.pt"
                ]
            },
            {
                "steps": 799990,
                "file_path": "results/Huggy/Huggy/Huggy-799990.onnx",
                "reward": 3.775895574834289,
                "creation_time": 1688877393.032784,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-799990.pt"
                ]
            },
            {
                "steps": 999509,
                "file_path": "results/Huggy/Huggy/Huggy-999509.onnx",
                "reward": 3.8379916602914985,
                "creation_time": 1688877660.3432589,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-999509.pt"
                ]
            },
            {
                "steps": 1199937,
                "file_path": "results/Huggy/Huggy/Huggy-1199937.onnx",
                "reward": 3.9629319874016016,
                "creation_time": 1688877926.889072,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-1199937.pt"
                ]
            },
            {
                "steps": 1399976,
                "file_path": "results/Huggy/Huggy/Huggy-1399976.onnx",
                "reward": 3.5126738650042837,
                "creation_time": 1688878188.443511,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-1399976.pt"
                ]
            },
            {
                "steps": 1599438,
                "file_path": "results/Huggy/Huggy/Huggy-1599438.onnx",
                "reward": 3.517661327855629,
                "creation_time": 1688878451.3188474,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-1599438.pt"
                ]
            },
            {
                "steps": 1799991,
                "file_path": "results/Huggy/Huggy/Huggy-1799991.onnx",
                "reward": 3.689263160412128,
                "creation_time": 1688878715.697442,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-1799991.pt"
                ]
            },
            {
                "steps": 1999601,
                "file_path": "results/Huggy/Huggy/Huggy-1999601.onnx",
                "reward": 3.5600472592759407,
                "creation_time": 1688878971.9452097,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-1999601.pt"
                ]
            },
            {
                "steps": 2000351,
                "file_path": "results/Huggy/Huggy/Huggy-2000351.onnx",
                "reward": 3.524019775390625,
                "creation_time": 1688878972.0969791,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-2000351.pt"
                ]
            }
        ],
        "final_checkpoint": {
            "steps": 2000351,
            "file_path": "results/Huggy/Huggy.onnx",
            "reward": 3.524019775390625,
            "creation_time": 1688878972.0969791,
            "auxillary_file_paths": [
                "results/Huggy/Huggy/Huggy-2000351.pt"
            ]
        }
    },
    "metadata": {
        "stats_format_version": "0.3.0",
        "mlagents_version": "0.31.0.dev0",
        "torch_version": "1.11.0+cu102"
    }
}