{
"Huggy": {
"checkpoints": [
{
"steps": 199898,
"file_path": "results/Huggy/Huggy/Huggy-199898.onnx",
"reward": 3.3162058353424073,
"creation_time": 1686116665.8217328,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-199898.pt"
]
},
{
"steps": 399943,
"file_path": "results/Huggy/Huggy/Huggy-399943.onnx",
"reward": 3.9707446563041815,
"creation_time": 1686116899.0284274,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-399943.pt"
]
},
{
"steps": 599888,
"file_path": "results/Huggy/Huggy/Huggy-599888.onnx",
"reward": 4.772530657904489,
"creation_time": 1686117133.8724282,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-599888.pt"
]
},
{
"steps": 799908,
"file_path": "results/Huggy/Huggy/Huggy-799908.onnx",
"reward": 4.0911466017529206,
"creation_time": 1686117366.2124243,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-799908.pt"
]
},
{
"steps": 999889,
"file_path": "results/Huggy/Huggy/Huggy-999889.onnx",
"reward": 4.015794594498241,
"creation_time": 1686117599.8091285,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-999889.pt"
]
},
{
"steps": 1199912,
"file_path": "results/Huggy/Huggy/Huggy-1199912.onnx",
"reward": 3.7894405550078343,
"creation_time": 1686117838.068258,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1199912.pt"
]
},
{
"steps": 1399926,
"file_path": "results/Huggy/Huggy/Huggy-1399926.onnx",
"reward": 4.058708346823356,
"creation_time": 1686118072.3917196,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1399926.pt"
]
},
{
"steps": 1599983,
"file_path": "results/Huggy/Huggy/Huggy-1599983.onnx",
"reward": 3.861815992742777,
"creation_time": 1686118314.1507144,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1599983.pt"
]
},
{
"steps": 1799926,
"file_path": "results/Huggy/Huggy/Huggy-1799926.onnx",
"reward": 3.8736105987696146,
"creation_time": 1686118554.08406,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1799926.pt"
]
},
{
"steps": 1999999,
"file_path": "results/Huggy/Huggy/Huggy-1999999.onnx",
"reward": 3.965607390321534,
"creation_time": 1686118792.4898071,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1999999.pt"
]
},
{
"steps": 2000110,
"file_path": "results/Huggy/Huggy/Huggy-2000110.onnx",
"reward": 4.001765778509237,
"creation_time": 1686118792.6125753,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-2000110.pt"
]
}
],
"final_checkpoint": {
"steps": 2000110,
"file_path": "results/Huggy/Huggy.onnx",
"reward": 4.001765778509237,
"creation_time": 1686118792.6125753,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-2000110.pt"
]
}
},
"metadata": {
"stats_format_version": "0.3.0",
"mlagents_version": "0.31.0.dev0",
"torch_version": "1.11.0+cu102"
}
}