{
"Huggy": {
"checkpoints": [
{
"steps": 199986,
"file_path": "results/Huggy2/Huggy/Huggy-199986.onnx",
"reward": 3.139234672142909,
"creation_time": 1745662000.825055,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-199986.pt"
]
},
{
"steps": 399970,
"file_path": "results/Huggy2/Huggy/Huggy-399970.onnx",
"reward": 3.709269802047782,
"creation_time": 1745662247.4142642,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-399970.pt"
]
},
{
"steps": 599889,
"file_path": "results/Huggy2/Huggy/Huggy-599889.onnx",
"reward": 3.2334143022696176,
"creation_time": 1745662496.2212846,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-599889.pt"
]
},
{
"steps": 799956,
"file_path": "results/Huggy2/Huggy/Huggy-799956.onnx",
"reward": 3.9724702480766507,
"creation_time": 1745662742.0731046,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-799956.pt"
]
},
{
"steps": 999977,
"file_path": "results/Huggy2/Huggy/Huggy-999977.onnx",
"reward": 3.6982783588670913,
"creation_time": 1745662995.3152635,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-999977.pt"
]
},
{
"steps": 1199456,
"file_path": "results/Huggy2/Huggy/Huggy-1199456.onnx",
"reward": 3.721044616621049,
"creation_time": 1745663242.4809468,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1199456.pt"
]
},
{
"steps": 1399999,
"file_path": "results/Huggy2/Huggy/Huggy-1399999.onnx",
"reward": 3.7480582378127356,
"creation_time": 1745663496.8820581,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1399999.pt"
]
},
{
"steps": 1599874,
"file_path": "results/Huggy2/Huggy/Huggy-1599874.onnx",
"reward": 3.677422293411788,
"creation_time": 1745663746.419581,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1599874.pt"
]
},
{
"steps": 1799900,
"file_path": "results/Huggy2/Huggy/Huggy-1799900.onnx",
"reward": 3.86895230092293,
"creation_time": 1745663995.7557068,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1799900.pt"
]
},
{
"steps": 1999948,
"file_path": "results/Huggy2/Huggy/Huggy-1999948.onnx",
"reward": 4.081705995400747,
"creation_time": 1745664247.5433795,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1999948.pt"
]
},
{
"steps": 2000021,
"file_path": "results/Huggy2/Huggy/Huggy-2000021.onnx",
"reward": 4.078873536625847,
"creation_time": 1745664247.70961,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-2000021.pt"
]
}
],
"final_checkpoint": {
"steps": 2000021,
"file_path": "results/Huggy2/Huggy.onnx",
"reward": 4.078873536625847,
"creation_time": 1745664247.70961,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-2000021.pt"
]
}
},
"metadata": {
"stats_format_version": "0.3.0",
"mlagents_version": "1.2.0.dev0",
"torch_version": "2.7.0+cu126"
}
}