{
"Huggy": {
"checkpoints": [
{
"steps": 199718,
"file_path": "results/Huggy2/Huggy/Huggy-199718.onnx",
"reward": 3.6553778742489063,
"creation_time": 1757246430.6154413,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-199718.pt"
]
},
{
"steps": 399852,
"file_path": "results/Huggy2/Huggy/Huggy-399852.onnx",
"reward": 3.5544274017214774,
"creation_time": 1757246679.460903,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-399852.pt"
]
},
{
"steps": 599943,
"file_path": "results/Huggy2/Huggy/Huggy-599943.onnx",
"reward": 3.616568697350366,
"creation_time": 1757246933.989429,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-599943.pt"
]
},
{
"steps": 799971,
"file_path": "results/Huggy2/Huggy/Huggy-799971.onnx",
"reward": 3.7069734855733736,
"creation_time": 1757247179.6064758,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-799971.pt"
]
},
{
"steps": 999971,
"file_path": "results/Huggy2/Huggy/Huggy-999971.onnx",
"reward": 3.803959564423897,
"creation_time": 1757247432.3227696,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-999971.pt"
]
},
{
"steps": 1199999,
"file_path": "results/Huggy2/Huggy/Huggy-1199999.onnx",
"reward": 3.529003584269181,
"creation_time": 1757247681.6345978,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1199999.pt"
]
},
{
"steps": 1399944,
"file_path": "results/Huggy2/Huggy/Huggy-1399944.onnx",
"reward": 4.37443807831517,
"creation_time": 1757247932.2803357,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1399944.pt"
]
},
{
"steps": 1599981,
"file_path": "results/Huggy2/Huggy/Huggy-1599981.onnx",
"reward": 3.8525354840816597,
"creation_time": 1757248181.5778391,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1599981.pt"
]
},
{
"steps": 1799964,
"file_path": "results/Huggy2/Huggy/Huggy-1799964.onnx",
"reward": 3.7245860812140674,
"creation_time": 1757248430.9716418,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1799964.pt"
]
},
{
"steps": 1999940,
"file_path": "results/Huggy2/Huggy/Huggy-1999940.onnx",
"reward": 3.8003379821777346,
"creation_time": 1757248681.6337242,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1999940.pt"
]
},
{
"steps": 2000016,
"file_path": "results/Huggy2/Huggy/Huggy-2000016.onnx",
"reward": 3.8060976851220225,
"creation_time": 1757248681.7464647,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-2000016.pt"
]
}
],
"final_checkpoint": {
"steps": 2000016,
"file_path": "results/Huggy2/Huggy.onnx",
"reward": 3.8060976851220225,
"creation_time": 1757248681.7464647,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-2000016.pt"
]
}
},
"metadata": {
"stats_format_version": "0.3.0",
"mlagents_version": "1.2.0.dev0",
"torch_version": "2.8.0+cu128"
}
}