{
"Huggy": {
"checkpoints": [
{
"steps": 199868,
"file_path": "results/Huggy/Huggy/Huggy-199868.onnx",
"reward": 3.3678108642971707,
"creation_time": 1756069253.4850132,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-199868.pt"
]
},
{
"steps": 399986,
"file_path": "results/Huggy/Huggy/Huggy-399986.onnx",
"reward": 3.625485097510474,
"creation_time": 1756069490.28306,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-399986.pt"
]
},
{
"steps": 599893,
"file_path": "results/Huggy/Huggy/Huggy-599893.onnx",
"reward": 3.8287987547951774,
"creation_time": 1756069733.592332,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-599893.pt"
]
},
{
"steps": 799931,
"file_path": "results/Huggy/Huggy/Huggy-799931.onnx",
"reward": 3.8762027426407886,
"creation_time": 1756069969.3520803,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-799931.pt"
]
},
{
"steps": 999957,
"file_path": "results/Huggy/Huggy/Huggy-999957.onnx",
"reward": 4.026466396036027,
"creation_time": 1756070208.453845,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-999957.pt"
]
},
{
"steps": 1199974,
"file_path": "results/Huggy/Huggy/Huggy-1199974.onnx",
"reward": 3.8426312251133963,
"creation_time": 1756070449.9336514,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1199974.pt"
]
},
{
"steps": 1399982,
"file_path": "results/Huggy/Huggy/Huggy-1399982.onnx",
"reward": 3.53261452285867,
"creation_time": 1756070693.267963,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1399982.pt"
]
},
{
"steps": 1599978,
"file_path": "results/Huggy/Huggy/Huggy-1599978.onnx",
"reward": 3.8955016276415657,
"creation_time": 1756070932.5981908,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1599978.pt"
]
},
{
"steps": 1799940,
"file_path": "results/Huggy/Huggy/Huggy-1799940.onnx",
"reward": 3.995154064215586,
"creation_time": 1756071182.1632857,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1799940.pt"
]
},
{
"steps": 1999971,
"file_path": "results/Huggy/Huggy/Huggy-1999971.onnx",
"reward": 3.7959503790911504,
"creation_time": 1756071437.139011,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1999971.pt"
]
},
{
"steps": 2000061,
"file_path": "results/Huggy/Huggy/Huggy-2000061.onnx",
"reward": 3.826584971228311,
"creation_time": 1756071437.248009,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-2000061.pt"
]
}
],
"final_checkpoint": {
"steps": 2000061,
"file_path": "results/Huggy/Huggy.onnx",
"reward": 3.826584971228311,
"creation_time": 1756071437.248009,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-2000061.pt"
]
}
},
"metadata": {
"stats_format_version": "0.3.0",
"mlagents_version": "1.2.0.dev0",
"torch_version": "2.8.0+cu128"
}
}