{
"Huggy": {
"checkpoints": [
{
"steps": 199943,
"file_path": "results/Huggy2/Huggy/Huggy-199943.onnx",
"reward": 3.6044786159808817,
"creation_time": 1715563738.8890324,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-199943.pt"
]
},
{
"steps": 399905,
"file_path": "results/Huggy2/Huggy/Huggy-399905.onnx",
"reward": 3.619776088897496,
"creation_time": 1715564001.0724533,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-399905.pt"
]
},
{
"steps": 599933,
"file_path": "results/Huggy2/Huggy/Huggy-599933.onnx",
"reward": 3.8493998050689697,
"creation_time": 1715564268.5380034,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-599933.pt"
]
},
{
"steps": 799951,
"file_path": "results/Huggy2/Huggy/Huggy-799951.onnx",
"reward": 3.662301534081098,
"creation_time": 1715564529.4491947,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-799951.pt"
]
},
{
"steps": 999982,
"file_path": "results/Huggy2/Huggy/Huggy-999982.onnx",
"reward": 3.8138877148258277,
"creation_time": 1715564799.620251,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-999982.pt"
]
},
{
"steps": 1199571,
"file_path": "results/Huggy2/Huggy/Huggy-1199571.onnx",
"reward": 3.6807407631593594,
"creation_time": 1715565064.7888298,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1199571.pt"
]
},
{
"steps": 1399397,
"file_path": "results/Huggy2/Huggy/Huggy-1399397.onnx",
"reward": 3.497524031251669,
"creation_time": 1715565331.4881942,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1399397.pt"
]
},
{
"steps": 1599961,
"file_path": "results/Huggy2/Huggy/Huggy-1599961.onnx",
"reward": 3.565810152608105,
"creation_time": 1715565596.8272183,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1599961.pt"
]
},
{
"steps": 1799900,
"file_path": "results/Huggy2/Huggy/Huggy-1799900.onnx",
"reward": 3.2088309661397396,
"creation_time": 1715565863.2238204,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1799900.pt"
]
},
{
"steps": 1999881,
"file_path": "results/Huggy2/Huggy/Huggy-1999881.onnx",
"reward": 2.938766658306122,
"creation_time": 1715566126.6300108,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1999881.pt"
]
},
{
"steps": 2000631,
"file_path": "results/Huggy2/Huggy/Huggy-2000631.onnx",
"reward": 2.27707658873664,
"creation_time": 1715566126.7885404,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-2000631.pt"
]
}
],
"final_checkpoint": {
"steps": 2000631,
"file_path": "results/Huggy2/Huggy.onnx",
"reward": 2.27707658873664,
"creation_time": 1715566126.7885404,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-2000631.pt"
]
}
},
"metadata": {
"stats_format_version": "0.3.0",
"mlagents_version": "1.1.0.dev0",
"torch_version": "2.2.1+cu121"
}
}