{
  "Huggy": {
    "checkpoints": [
      {
        "steps": 199974,
        "file_path": "results/Huggy2/Huggy/Huggy-199974.onnx",
        "reward": 3.378099623748234,
        "creation_time": 1759240698.0101979,
        "auxillary_file_paths": [
          "results/Huggy2/Huggy/Huggy-199974.pt"
        ]
      },
      {
        "steps": 399934,
        "file_path": "results/Huggy2/Huggy/Huggy-399934.onnx",
        "reward": 3.6940031430938025,
        "creation_time": 1759240964.4170718,
        "auxillary_file_paths": [
          "results/Huggy2/Huggy/Huggy-399934.pt"
        ]
      },
      {
        "steps": 599899,
        "file_path": "results/Huggy2/Huggy/Huggy-599899.onnx",
        "reward": 4.17516722490913,
        "creation_time": 1759241231.3882718,
        "auxillary_file_paths": [
          "results/Huggy2/Huggy/Huggy-599899.pt"
        ]
      },
      {
        "steps": 799901,
        "file_path": "results/Huggy2/Huggy/Huggy-799901.onnx",
        "reward": 3.738397101453833,
        "creation_time": 1759241494.602783,
        "auxillary_file_paths": [
          "results/Huggy2/Huggy/Huggy-799901.pt"
        ]
      },
      {
        "steps": 999990,
        "file_path": "results/Huggy2/Huggy/Huggy-999990.onnx",
        "reward": 3.917864048725938,
        "creation_time": 1759241748.1781006,
        "auxillary_file_paths": [
          "results/Huggy2/Huggy/Huggy-999990.pt"
        ]
      },
      {
        "steps": 1199972,
        "file_path": "results/Huggy2/Huggy/Huggy-1199972.onnx",
        "reward": 3.7664989722402473,
        "creation_time": 1759241996.7358868,
        "auxillary_file_paths": [
          "results/Huggy2/Huggy/Huggy-1199972.pt"
        ]
      },
      {
        "steps": 1399947,
        "file_path": "results/Huggy2/Huggy/Huggy-1399947.onnx",
        "reward": 3.315894159777411,
        "creation_time": 1759242246.62659,
        "auxillary_file_paths": [
          "results/Huggy2/Huggy/Huggy-1399947.pt"
        ]
      },
      {
        "steps": 1599967,
        "file_path": "results/Huggy2/Huggy/Huggy-1599967.onnx",
        "reward": 3.9473767280578613,
        "creation_time": 1759242493.4710078,
        "auxillary_file_paths": [
          "results/Huggy2/Huggy/Huggy-1599967.pt"
        ]
      },
      {
        "steps": 1799629,
        "file_path": "results/Huggy2/Huggy/Huggy-1799629.onnx",
        "reward": 3.874115647872289,
        "creation_time": 1759242739.0486536,
        "auxillary_file_paths": [
          "results/Huggy2/Huggy/Huggy-1799629.pt"
        ]
      },
      {
        "steps": 1999942,
        "file_path": "results/Huggy2/Huggy/Huggy-1999942.onnx",
        "reward": 3.953919441335731,
        "creation_time": 1759242985.378303,
        "auxillary_file_paths": [
          "results/Huggy2/Huggy/Huggy-1999942.pt"
        ]
      },
      {
        "steps": 2000042,
        "file_path": "results/Huggy2/Huggy/Huggy-2000042.onnx",
        "reward": 3.9836439278027784,
        "creation_time": 1759242985.4805999,
        "auxillary_file_paths": [
          "results/Huggy2/Huggy/Huggy-2000042.pt"
        ]
      }
    ],
    "final_checkpoint": {
      "steps": 2000042,
      "file_path": "results/Huggy2/Huggy.onnx",
      "reward": 3.9836439278027784,
      "creation_time": 1759242985.4805999,
      "auxillary_file_paths": [
        "results/Huggy2/Huggy/Huggy-2000042.pt"
      ]
    }
  },
  "metadata": {
    "stats_format_version": "0.3.0",
    "mlagents_version": "1.2.0.dev0",
    "torch_version": "2.8.0+cu128"
  }
}