{
"Huggy": {
"checkpoints": [
{
"steps": 199972,
"file_path": "results/Huggy2/Huggy/Huggy-199972.onnx",
"reward": 3.3848331157977762,
"creation_time": 1718559259.9246309,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-199972.pt"
]
},
{
"steps": 399856,
"file_path": "results/Huggy2/Huggy/Huggy-399856.onnx",
"reward": 3.6872993921166035,
"creation_time": 1718559491.5220327,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-399856.pt"
]
},
{
"steps": 599992,
"file_path": "results/Huggy2/Huggy/Huggy-599992.onnx",
"reward": 3.3127930879592897,
"creation_time": 1718559724.2453754,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-599992.pt"
]
},
{
"steps": 799869,
"file_path": "results/Huggy2/Huggy/Huggy-799869.onnx",
"reward": 3.8431046090511063,
"creation_time": 1718559954.6734335,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-799869.pt"
]
},
{
"steps": 999402,
"file_path": "results/Huggy2/Huggy/Huggy-999402.onnx",
"reward": 3.630750412378854,
"creation_time": 1718560189.4814317,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-999402.pt"
]
},
{
"steps": 1199950,
"file_path": "results/Huggy2/Huggy/Huggy-1199950.onnx",
"reward": 3.889456471976112,
"creation_time": 1718560429.0252795,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1199950.pt"
]
},
{
"steps": 1399979,
"file_path": "results/Huggy2/Huggy/Huggy-1399979.onnx",
"reward": 3.8498820833495406,
"creation_time": 1718560661.241895,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1399979.pt"
]
},
{
"steps": 1599998,
"file_path": "results/Huggy2/Huggy/Huggy-1599998.onnx",
"reward": 3.8446306768662293,
"creation_time": 1718560894.6239984,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1599998.pt"
]
},
{
"steps": 1799992,
"file_path": "results/Huggy2/Huggy/Huggy-1799992.onnx",
"reward": 3.8803198309960187,
"creation_time": 1718561132.7877963,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1799992.pt"
]
},
{
"steps": 1999884,
"file_path": "results/Huggy2/Huggy/Huggy-1999884.onnx",
"reward": 4.236785970482171,
"creation_time": 1718561368.797164,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1999884.pt"
]
},
{
"steps": 2000003,
"file_path": "results/Huggy2/Huggy/Huggy-2000003.onnx",
"reward": 4.257667227433278,
"creation_time": 1718561368.916217,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-2000003.pt"
]
}
],
"final_checkpoint": {
"steps": 2000003,
"file_path": "results/Huggy2/Huggy.onnx",
"reward": 4.257667227433278,
"creation_time": 1718561368.916217,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-2000003.pt"
]
}
},
"metadata": {
"stats_format_version": "0.3.0",
"mlagents_version": "1.1.0.dev0",
"torch_version": "2.3.0+cu121"
}
}