{
"Huggy": {
"checkpoints": [
{
"steps": 199793,
"file_path": "results/Huggy2/Huggy/Huggy-199793.onnx",
"reward": 3.787763702869415,
"creation_time": 1754703453.0328639,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-199793.pt"
]
},
{
"steps": 399946,
"file_path": "results/Huggy2/Huggy/Huggy-399946.onnx",
"reward": 3.6647362602290823,
"creation_time": 1754703706.6290402,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-399946.pt"
]
},
{
"steps": 599951,
"file_path": "results/Huggy2/Huggy/Huggy-599951.onnx",
"reward": 5.044776096940041,
"creation_time": 1754703959.314479,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-599951.pt"
]
},
{
"steps": 799925,
"file_path": "results/Huggy2/Huggy/Huggy-799925.onnx",
"reward": 3.810695577626941,
"creation_time": 1754704209.5634632,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-799925.pt"
]
},
{
"steps": 999959,
"file_path": "results/Huggy2/Huggy/Huggy-999959.onnx",
"reward": 3.777022907085586,
"creation_time": 1754704463.1625612,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-999959.pt"
]
},
{
"steps": 1199946,
"file_path": "results/Huggy2/Huggy/Huggy-1199946.onnx",
"reward": 3.3993400067699198,
"creation_time": 1754704712.6642895,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1199946.pt"
]
},
{
"steps": 1399605,
"file_path": "results/Huggy2/Huggy/Huggy-1399605.onnx",
"reward": 3.5803508449365746,
"creation_time": 1754704960.783127,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1399605.pt"
]
},
{
"steps": 1599899,
"file_path": "results/Huggy2/Huggy/Huggy-1599899.onnx",
"reward": 3.6362866404402348,
"creation_time": 1754705210.9288814,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1599899.pt"
]
},
{
"steps": 1799998,
"file_path": "results/Huggy2/Huggy/Huggy-1799998.onnx",
"reward": 3.8036385890725373,
"creation_time": 1754705461.5129879,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1799998.pt"
]
},
{
"steps": 1999955,
"file_path": "results/Huggy2/Huggy/Huggy-1999955.onnx",
"reward": 4.000876188278198,
"creation_time": 1754705710.1280348,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1999955.pt"
]
},
{
"steps": 2000143,
"file_path": "results/Huggy2/Huggy/Huggy-2000143.onnx",
"reward": 4.527245461940765,
"creation_time": 1754705710.2501295,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-2000143.pt"
]
}
],
"final_checkpoint": {
"steps": 2000143,
"file_path": "results/Huggy2/Huggy.onnx",
"reward": 4.527245461940765,
"creation_time": 1754705710.2501295,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-2000143.pt"
]
}
},
"metadata": {
"stats_format_version": "0.3.0",
"mlagents_version": "1.2.0.dev0",
"torch_version": "2.8.0+cu128"
}
}