{
"Huggy": {
"checkpoints": [
{
"steps": 199927,
"file_path": "results/Huggy2/Huggy/Huggy-199927.onnx",
"reward": 3.4829368701347936,
"creation_time": 1745479342.833638,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-199927.pt"
]
},
{
"steps": 399953,
"file_path": "results/Huggy2/Huggy/Huggy-399953.onnx",
"reward": 3.8055771209191587,
"creation_time": 1745479580.8409665,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-399953.pt"
]
},
{
"steps": 599993,
"file_path": "results/Huggy2/Huggy/Huggy-599993.onnx",
"reward": 3.7677467810480216,
"creation_time": 1745479823.009159,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-599993.pt"
]
},
{
"steps": 799999,
"file_path": "results/Huggy2/Huggy/Huggy-799999.onnx",
"reward": 3.7607817380768913,
"creation_time": 1745480064.920497,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-799999.pt"
]
},
{
"steps": 999969,
"file_path": "results/Huggy2/Huggy/Huggy-999969.onnx",
"reward": 4.097267482961927,
"creation_time": 1745480307.5347102,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-999969.pt"
]
},
{
"steps": 1199882,
"file_path": "results/Huggy2/Huggy/Huggy-1199882.onnx",
"reward": 3.9836307017008465,
"creation_time": 1745480547.7430086,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1199882.pt"
]
},
{
"steps": 1399956,
"file_path": "results/Huggy2/Huggy/Huggy-1399956.onnx",
"reward": 1.0369269847869873,
"creation_time": 1745480790.8853345,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1399956.pt"
]
},
{
"steps": 1599932,
"file_path": "results/Huggy2/Huggy/Huggy-1599932.onnx",
"reward": 3.9398850939490577,
"creation_time": 1745481031.9258144,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1599932.pt"
]
},
{
"steps": 1799940,
"file_path": "results/Huggy2/Huggy/Huggy-1799940.onnx",
"reward": 3.6711770854890347,
"creation_time": 1745481279.5288718,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1799940.pt"
]
},
{
"steps": 1999996,
"file_path": "results/Huggy2/Huggy/Huggy-1999996.onnx",
"reward": 3.707334518432617,
"creation_time": 1745481527.670291,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1999996.pt"
]
},
{
"steps": 2000050,
"file_path": "results/Huggy2/Huggy/Huggy-2000050.onnx",
"reward": 3.601493406295776,
"creation_time": 1745481527.7795215,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-2000050.pt"
]
}
],
"final_checkpoint": {
"steps": 2000050,
"file_path": "results/Huggy2/Huggy.onnx",
"reward": 3.601493406295776,
"creation_time": 1745481527.7795215,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-2000050.pt"
]
}
},
"metadata": {
"stats_format_version": "0.3.0",
"mlagents_version": "1.2.0.dev0",
"torch_version": "2.7.0+cu126"
}
}