{
"Huggy": {
"checkpoints": [
{
"steps": 199988,
"file_path": "results/Huggy2/Huggy/Huggy-199988.onnx",
"reward": 3.2605079097407206,
"creation_time": 1761815973.4892323,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-199988.pt"
]
},
{
"steps": 399981,
"file_path": "results/Huggy2/Huggy/Huggy-399981.onnx",
"reward": 3.8934786924174136,
"creation_time": 1761816241.9684289,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-399981.pt"
]
},
{
"steps": 599991,
"file_path": "results/Huggy2/Huggy/Huggy-599991.onnx",
"reward": 3.6748074870556593,
"creation_time": 1761816514.6734662,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-599991.pt"
]
},
{
"steps": 799994,
"file_path": "results/Huggy2/Huggy/Huggy-799994.onnx",
"reward": 3.8000684414858594,
"creation_time": 1761816785.0318904,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-799994.pt"
]
},
{
"steps": 999990,
"file_path": "results/Huggy2/Huggy/Huggy-999990.onnx",
"reward": 3.8852584334398736,
"creation_time": 1761817058.359952,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-999990.pt"
]
},
{
"steps": 1199926,
"file_path": "results/Huggy2/Huggy/Huggy-1199926.onnx",
"reward": 3.985989185585373,
"creation_time": 1761817331.7343748,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1199926.pt"
]
},
{
"steps": 1399957,
"file_path": "results/Huggy2/Huggy/Huggy-1399957.onnx",
"reward": 3.476506233215332,
"creation_time": 1761817604.4623957,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1399957.pt"
]
},
{
"steps": 1599981,
"file_path": "results/Huggy2/Huggy/Huggy-1599981.onnx",
"reward": 4.04097656984836,
"creation_time": 1761817874.9617407,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1599981.pt"
]
},
{
"steps": 1799964,
"file_path": "results/Huggy2/Huggy/Huggy-1799964.onnx",
"reward": 3.9580071272888806,
"creation_time": 1761818150.658128,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1799964.pt"
]
},
{
"steps": 1999998,
"file_path": "results/Huggy2/Huggy/Huggy-1999998.onnx",
"reward": 3.9186185856660205,
"creation_time": 1761818428.4533455,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1999998.pt"
]
},
{
"steps": 2000045,
"file_path": "results/Huggy2/Huggy/Huggy-2000045.onnx",
"reward": 3.915421917790272,
"creation_time": 1761818428.5723135,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-2000045.pt"
]
}
],
"final_checkpoint": {
"steps": 2000045,
"file_path": "results/Huggy2/Huggy.onnx",
"reward": 3.915421917790272,
"creation_time": 1761818428.5723135,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-2000045.pt"
]
}
},
"metadata": {
"stats_format_version": "0.3.0",
"mlagents_version": "1.2.0.dev0",
"torch_version": "2.8.0+cu128"
}
}