{
"Huggy": {
"checkpoints": [
{
"steps": 199844,
"file_path": "results/Huggy/Huggy/Huggy-199844.onnx",
"reward": 3.4413261120135967,
"creation_time": 1742524298.063352,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-199844.pt"
]
},
{
"steps": 399884,
"file_path": "results/Huggy/Huggy/Huggy-399884.onnx",
"reward": 3.966407258664408,
"creation_time": 1742524377.9347398,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-399884.pt"
]
},
{
"steps": 599977,
"file_path": "results/Huggy/Huggy/Huggy-599977.onnx",
"reward": 3.5861989770616804,
"creation_time": 1742524459.8031886,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-599977.pt"
]
},
{
"steps": 799996,
"file_path": "results/Huggy/Huggy/Huggy-799996.onnx",
"reward": 3.6686166274547576,
"creation_time": 1742524540.8793783,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-799996.pt"
]
},
{
"steps": 999961,
"file_path": "results/Huggy/Huggy/Huggy-999961.onnx",
"reward": 3.792508786286765,
"creation_time": 1742524623.2915132,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-999961.pt"
]
},
{
"steps": 1199983,
"file_path": "results/Huggy/Huggy/Huggy-1199983.onnx",
"reward": 3.479892955699437,
"creation_time": 1742524706.1440096,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1199983.pt"
]
},
{
"steps": 1399957,
"file_path": "results/Huggy/Huggy/Huggy-1399957.onnx",
"reward": 3.2694222075598582,
"creation_time": 1742524789.2083259,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1399957.pt"
]
},
{
"steps": 1599876,
"file_path": "results/Huggy/Huggy/Huggy-1599876.onnx",
"reward": 3.9053122327877925,
"creation_time": 1742524871.8603525,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1599876.pt"
]
},
{
"steps": 1799444,
"file_path": "results/Huggy/Huggy/Huggy-1799444.onnx",
"reward": 4.076754432571821,
"creation_time": 1742524955.462213,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1799444.pt"
]
},
{
"steps": 1999978,
"file_path": "results/Huggy/Huggy/Huggy-1999978.onnx",
"reward": 3.943865082173977,
"creation_time": 1742525038.821255,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1999978.pt"
]
},
{
"steps": 2000007,
"file_path": "results/Huggy/Huggy/Huggy-2000007.onnx",
"reward": 3.9115339550707073,
"creation_time": 1742525038.8800135,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-2000007.pt"
]
}
],
"final_checkpoint": {
"steps": 2000007,
"file_path": "results/Huggy/Huggy.onnx",
"reward": 3.9115339550707073,
"creation_time": 1742525038.8800135,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-2000007.pt"
]
}
},
"metadata": {
"stats_format_version": "0.3.0",
"mlagents_version": "1.2.0.dev0",
"torch_version": "2.6.0+cu124"
}
}