{
"Huggy": {
"checkpoints": [
{
"steps": 199728,
"file_path": "results/Huggy2/Huggy/Huggy-199728.onnx",
"reward": 2.939747240394354,
"creation_time": 1745552881.5688062,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-199728.pt"
]
},
{
"steps": 399848,
"file_path": "results/Huggy2/Huggy/Huggy-399848.onnx",
"reward": 4.3098974097858775,
"creation_time": 1745553144.0658364,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-399848.pt"
]
},
{
"steps": 599921,
"file_path": "results/Huggy2/Huggy/Huggy-599921.onnx",
"reward": 4.017245549904673,
"creation_time": 1745553389.435314,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-599921.pt"
]
},
{
"steps": 799924,
"file_path": "results/Huggy2/Huggy/Huggy-799924.onnx",
"reward": 3.8962511671216866,
"creation_time": 1745553624.820012,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-799924.pt"
]
},
{
"steps": 999904,
"file_path": "results/Huggy2/Huggy/Huggy-999904.onnx",
"reward": 4.2311712936498225,
"creation_time": 1745553863.0827758,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-999904.pt"
]
},
{
"steps": 1199925,
"file_path": "results/Huggy2/Huggy/Huggy-1199925.onnx",
"reward": 3.730260723762298,
"creation_time": 1745554099.382184,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1199925.pt"
]
},
{
"steps": 1399924,
"file_path": "results/Huggy2/Huggy/Huggy-1399924.onnx",
"reward": 3.5605772654215495,
"creation_time": 1745554334.4793048,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1399924.pt"
]
},
{
"steps": 1599894,
"file_path": "results/Huggy2/Huggy/Huggy-1599894.onnx",
"reward": 4.094278802568057,
"creation_time": 1745554568.401811,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1599894.pt"
]
},
{
"steps": 1799945,
"file_path": "results/Huggy2/Huggy/Huggy-1799945.onnx",
"reward": 4.035154617109964,
"creation_time": 1745554807.4993157,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1799945.pt"
]
},
{
"steps": 1999984,
"file_path": "results/Huggy2/Huggy/Huggy-1999984.onnx",
"reward": 4.135568237695538,
"creation_time": 1745555047.9928343,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1999984.pt"
]
},
{
"steps": 2000053,
"file_path": "results/Huggy2/Huggy/Huggy-2000053.onnx",
"reward": 4.145287092654936,
"creation_time": 1745555048.0989504,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-2000053.pt"
]
}
],
"final_checkpoint": {
"steps": 2000053,
"file_path": "results/Huggy2/Huggy.onnx",
"reward": 4.145287092654936,
"creation_time": 1745555048.0989504,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-2000053.pt"
]
}
},
"metadata": {
"stats_format_version": "0.3.0",
"mlagents_version": "1.2.0.dev0",
"torch_version": "2.7.0+cu126"
}
}