{
  "Huggy": {
    "checkpoints": [
      {
        "steps": 199934,
        "file_path": "results/Huggy3/Huggy/Huggy-199934.onnx",
        "reward": 3.213835142160717,
        "creation_time": 1743745691.5029094,
        "auxillary_file_paths": [
          "results/Huggy3/Huggy/Huggy-199934.pt"
        ]
      },
      {
        "steps": 399945,
        "file_path": "results/Huggy3/Huggy/Huggy-399945.onnx",
        "reward": 3.6411960154283243,
        "creation_time": 1743745939.100669,
        "auxillary_file_paths": [
          "results/Huggy3/Huggy/Huggy-399945.pt"
        ]
      },
      {
        "steps": 599894,
        "file_path": "results/Huggy3/Huggy/Huggy-599894.onnx",
        "reward": 3.890092605038693,
        "creation_time": 1743746191.6243782,
        "auxillary_file_paths": [
          "results/Huggy3/Huggy/Huggy-599894.pt"
        ]
      },
      {
        "steps": 799906,
        "file_path": "results/Huggy3/Huggy/Huggy-799906.onnx",
        "reward": 3.8114525922754194,
        "creation_time": 1743746444.622779,
        "auxillary_file_paths": [
          "results/Huggy3/Huggy/Huggy-799906.pt"
        ]
      },
      {
        "steps": 999633,
        "file_path": "results/Huggy3/Huggy/Huggy-999633.onnx",
        "reward": 3.9443247358004254,
        "creation_time": 1743746694.463577,
        "auxillary_file_paths": [
          "results/Huggy3/Huggy/Huggy-999633.pt"
        ]
      },
      {
        "steps": 1199999,
        "file_path": "results/Huggy3/Huggy/Huggy-1199999.onnx",
        "reward": 4.191964429967544,
        "creation_time": 1743746952.7418735,
        "auxillary_file_paths": [
          "results/Huggy3/Huggy/Huggy-1199999.pt"
        ]
      },
      {
        "steps": 1399968,
        "file_path": "results/Huggy3/Huggy/Huggy-1399968.onnx",
        "reward": 3.9308825839222887,
        "creation_time": 1743747211.2642758,
        "auxillary_file_paths": [
          "results/Huggy3/Huggy/Huggy-1399968.pt"
        ]
      },
      {
        "steps": 1599939,
        "file_path": "results/Huggy3/Huggy/Huggy-1599939.onnx",
        "reward": 4.046548350842413,
        "creation_time": 1743747468.2410834,
        "auxillary_file_paths": [
          "results/Huggy3/Huggy/Huggy-1599939.pt"
        ]
      },
      {
        "steps": 1799906,
        "file_path": "results/Huggy3/Huggy/Huggy-1799906.onnx",
        "reward": 3.736314087842418,
        "creation_time": 1743747721.2096035,
        "auxillary_file_paths": [
          "results/Huggy3/Huggy/Huggy-1799906.pt"
        ]
      },
      {
        "steps": 1999922,
        "file_path": "results/Huggy3/Huggy/Huggy-1999922.onnx",
        "reward": 4.097079536249471,
        "creation_time": 1743747971.357888,
        "auxillary_file_paths": [
          "results/Huggy3/Huggy/Huggy-1999922.pt"
        ]
      },
      {
        "steps": 2000017,
        "file_path": "results/Huggy3/Huggy/Huggy-2000017.onnx",
        "reward": 4.125560157678344,
        "creation_time": 1743747971.540116,
        "auxillary_file_paths": [
          "results/Huggy3/Huggy/Huggy-2000017.pt"
        ]
      }
    ],
    "final_checkpoint": {
      "steps": 2000017,
      "file_path": "results/Huggy3/Huggy.onnx",
      "reward": 4.125560157678344,
      "creation_time": 1743747971.540116,
      "auxillary_file_paths": [
        "results/Huggy3/Huggy/Huggy-2000017.pt"
      ]
    }
  },
  "metadata": {
    "stats_format_version": "0.3.0",
    "mlagents_version": "1.2.0.dev0",
    "torch_version": "2.6.0+cu124"
  }
}