{
"Huggy": {
"checkpoints": [
{
"steps": 199569,
"file_path": "results/Huggy/Huggy/Huggy-199569.onnx",
"reward": 3.2660838429744428,
"creation_time": 1689597828.3118713,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-199569.pt"
]
},
{
"steps": 399965,
"file_path": "results/Huggy/Huggy/Huggy-399965.onnx",
"reward": 3.649475265951718,
"creation_time": 1689598076.0454369,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-399965.pt"
]
},
{
"steps": 599851,
"file_path": "results/Huggy/Huggy/Huggy-599851.onnx",
"reward": 3.7009510718859158,
"creation_time": 1689598327.104695,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-599851.pt"
]
},
{
"steps": 799962,
"file_path": "results/Huggy/Huggy/Huggy-799962.onnx",
"reward": 3.7640208651919567,
"creation_time": 1689598580.8891003,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-799962.pt"
]
},
{
"steps": 999965,
"file_path": "results/Huggy/Huggy/Huggy-999965.onnx",
"reward": 4.171847300796673,
"creation_time": 1689598841.637663,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-999965.pt"
]
},
{
"steps": 1199981,
"file_path": "results/Huggy/Huggy/Huggy-1199981.onnx",
"reward": 3.7900656955582757,
"creation_time": 1689599099.446611,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1199981.pt"
]
},
{
"steps": 1399984,
"file_path": "results/Huggy/Huggy/Huggy-1399984.onnx",
"reward": 3.102135252952576,
"creation_time": 1689599361.1005483,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1399984.pt"
]
},
{
"steps": 1599996,
"file_path": "results/Huggy/Huggy/Huggy-1599996.onnx",
"reward": 4.004090823233128,
"creation_time": 1689599617.771999,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1599996.pt"
]
},
{
"steps": 1799969,
"file_path": "results/Huggy/Huggy/Huggy-1799969.onnx",
"reward": 4.172963680699468,
"creation_time": 1689599876.4248552,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1799969.pt"
]
},
{
"steps": 1999921,
"file_path": "results/Huggy/Huggy/Huggy-1999921.onnx",
"reward": 3.979848512962683,
"creation_time": 1689600133.5385616,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1999921.pt"
]
},
{
"steps": 2000006,
"file_path": "results/Huggy/Huggy/Huggy-2000006.onnx",
"reward": 3.9902892673716828,
"creation_time": 1689600133.7404778,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-2000006.pt"
]
}
],
"final_checkpoint": {
"steps": 2000006,
"file_path": "results/Huggy/Huggy.onnx",
"reward": 3.9902892673716828,
"creation_time": 1689600133.7404778,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-2000006.pt"
]
}
},
"metadata": {
"stats_format_version": "0.3.0",
"mlagents_version": "0.31.0.dev0",
"torch_version": "1.11.0+cu102"
}
}