{
    "Huggy": {
        "checkpoints": [
            {
                "steps": 199852,
                "file_path": "results/Huggy/Huggy/Huggy-199852.onnx",
                "reward": 3.1522225386956158,
                "creation_time": 1697457973.761594,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-199852.pt"
                ]
            },
            {
                "steps": 399964,
                "file_path": "results/Huggy/Huggy/Huggy-399964.onnx",
                "reward": 3.889893203515273,
                "creation_time": 1697458189.6677845,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-399964.pt"
                ]
            },
            {
                "steps": 599901,
                "file_path": "results/Huggy/Huggy/Huggy-599901.onnx",
                "reward": 3.5012564857800803,
                "creation_time": 1697458410.3899179,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-599901.pt"
                ]
            },
            {
                "steps": 799969,
                "file_path": "results/Huggy/Huggy/Huggy-799969.onnx",
                "reward": 3.834406405827249,
                "creation_time": 1697458627.0586362,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-799969.pt"
                ]
            },
            {
                "steps": 999921,
                "file_path": "results/Huggy/Huggy/Huggy-999921.onnx",
                "reward": 3.8235596347600223,
                "creation_time": 1697458852.4328136,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-999921.pt"
                ]
            },
            {
                "steps": 1199970,
                "file_path": "results/Huggy/Huggy/Huggy-1199970.onnx",
                "reward": 3.810291873019876,
                "creation_time": 1697459073.5128593,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-1199970.pt"
                ]
            },
            {
                "steps": 1399999,
                "file_path": "results/Huggy/Huggy/Huggy-1399999.onnx",
                "reward": 3.9300078576610935,
                "creation_time": 1697459295.1418827,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-1399999.pt"
                ]
            },
            {
                "steps": 1599946,
                "file_path": "results/Huggy/Huggy/Huggy-1599946.onnx",
                "reward": 3.9398964368778726,
                "creation_time": 1697459515.093909,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-1599946.pt"
                ]
            },
            {
                "steps": 1799951,
                "file_path": "results/Huggy/Huggy/Huggy-1799951.onnx",
                "reward": 3.7817307833469274,
                "creation_time": 1697459737.5584621,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-1799951.pt"
                ]
            },
            {
                "steps": 1999918,
                "file_path": "results/Huggy/Huggy/Huggy-1999918.onnx",
                "reward": 3.606171782855149,
                "creation_time": 1697459960.4305303,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-1999918.pt"
                ]
            },
            {
                "steps": 2000005,
                "file_path": "results/Huggy/Huggy/Huggy-2000005.onnx",
                "reward": 3.600034409891004,
                "creation_time": 1697459960.5316968,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-2000005.pt"
                ]
            }
        ],
        "final_checkpoint": {
            "steps": 2000005,
            "file_path": "results/Huggy/Huggy.onnx",
            "reward": 3.600034409891004,
            "creation_time": 1697459960.5316968,
            "auxillary_file_paths": [
                "results/Huggy/Huggy/Huggy-2000005.pt"
            ]
        }
    },
    "metadata": {
        "stats_format_version": "0.3.0",
        "mlagents_version": "1.1.0.dev0",
        "torch_version": "2.0.1+cu118"
    }
}