{
"Huggy": {
"checkpoints": [
{
"steps": 199946,
"file_path": "results/Huggy/Huggy/Huggy-199946.onnx",
"reward": 3.1949710803972162,
"creation_time": 1697938460.98449,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-199946.pt"
]
},
{
"steps": 399848,
"file_path": "results/Huggy/Huggy/Huggy-399848.onnx",
"reward": 3.991611120956285,
"creation_time": 1697938706.2916565,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-399848.pt"
]
},
{
"steps": 599971,
"file_path": "results/Huggy/Huggy/Huggy-599971.onnx",
"reward": 4.130415449539821,
"creation_time": 1697938952.5404034,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-599971.pt"
]
},
{
"steps": 799859,
"file_path": "results/Huggy/Huggy/Huggy-799859.onnx",
"reward": 3.56465741859542,
"creation_time": 1697939192.0772908,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-799859.pt"
]
},
{
"steps": 999995,
"file_path": "results/Huggy/Huggy/Huggy-999995.onnx",
"reward": 3.6419521329599784,
"creation_time": 1697939435.332916,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-999995.pt"
]
},
{
"steps": 1199992,
"file_path": "results/Huggy/Huggy/Huggy-1199992.onnx",
"reward": 3.9038674647991476,
"creation_time": 1697939671.60082,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1199992.pt"
]
},
{
"steps": 1399936,
"file_path": "results/Huggy/Huggy/Huggy-1399936.onnx",
"reward": 3.9828068488669173,
"creation_time": 1697939904.6268108,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1399936.pt"
]
},
{
"steps": 1599946,
"file_path": "results/Huggy/Huggy/Huggy-1599946.onnx",
"reward": 3.7295397326646262,
"creation_time": 1697940145.512017,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1599946.pt"
]
},
{
"steps": 1799979,
"file_path": "results/Huggy/Huggy/Huggy-1799979.onnx",
"reward": 3.96812718301206,
"creation_time": 1697940389.5086613,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1799979.pt"
]
},
{
"steps": 1999370,
"file_path": "results/Huggy/Huggy/Huggy-1999370.onnx",
"reward": null,
"creation_time": 1697940633.997436,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1999370.pt"
]
},
{
"steps": 2000120,
"file_path": "results/Huggy/Huggy/Huggy-2000120.onnx",
"reward": -3.8918020725250244,
"creation_time": 1697940634.1291683,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-2000120.pt"
]
}
],
"final_checkpoint": {
"steps": 2000120,
"file_path": "results/Huggy/Huggy.onnx",
"reward": -3.8918020725250244,
"creation_time": 1697940634.1291683,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-2000120.pt"
]
}
},
"metadata": {
"stats_format_version": "0.3.0",
"mlagents_version": "1.1.0.dev0",
"torch_version": "2.1.0+cu118"
}
}