{
"Huggy": {
"checkpoints": [
{
"steps": 199985,
"file_path": "results/Huggy2/Huggy/Huggy-199985.onnx",
"reward": 3.5953845658472607,
"creation_time": 1743178651.780803,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-199985.pt"
]
},
{
"steps": 399991,
"file_path": "results/Huggy2/Huggy/Huggy-399991.onnx",
"reward": 3.591860374094735,
"creation_time": 1743178891.381672,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-399991.pt"
]
},
{
"steps": 599925,
"file_path": "results/Huggy2/Huggy/Huggy-599925.onnx",
"reward": 3.1724665448779152,
"creation_time": 1743179133.1837296,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-599925.pt"
]
},
{
"steps": 799958,
"file_path": "results/Huggy2/Huggy/Huggy-799958.onnx",
"reward": 3.700543732746788,
"creation_time": 1743179373.7364638,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-799958.pt"
]
},
{
"steps": 999785,
"file_path": "results/Huggy2/Huggy/Huggy-999785.onnx",
"reward": 3.7005946901109485,
"creation_time": 1743179623.497631,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-999785.pt"
]
},
{
"steps": 1199884,
"file_path": "results/Huggy2/Huggy/Huggy-1199884.onnx",
"reward": 4.09986062241452,
"creation_time": 1743179870.598037,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1199884.pt"
]
},
{
"steps": 1399928,
"file_path": "results/Huggy2/Huggy/Huggy-1399928.onnx",
"reward": null,
"creation_time": 1743180121.560342,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1399928.pt"
]
},
{
"steps": 1599994,
"file_path": "results/Huggy2/Huggy/Huggy-1599994.onnx",
"reward": 3.647951064847451,
"creation_time": 1743180363.8745024,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1599994.pt"
]
},
{
"steps": 1799947,
"file_path": "results/Huggy2/Huggy/Huggy-1799947.onnx",
"reward": 4.415598630309105,
"creation_time": 1743180620.9939368,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1799947.pt"
]
},
{
"steps": 1999365,
"file_path": "results/Huggy2/Huggy/Huggy-1999365.onnx",
"reward": 4.441535826652281,
"creation_time": 1743180906.5774322,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1999365.pt"
]
},
{
"steps": 2000115,
"file_path": "results/Huggy2/Huggy/Huggy-2000115.onnx",
"reward": 4.2028022184967995,
"creation_time": 1743180906.7565353,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-2000115.pt"
]
}
],
"final_checkpoint": {
"steps": 2000115,
"file_path": "results/Huggy2/Huggy.onnx",
"reward": 4.2028022184967995,
"creation_time": 1743180906.7565353,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-2000115.pt"
]
}
},
"metadata": {
"stats_format_version": "0.3.0",
"mlagents_version": "1.2.0.dev0",
"torch_version": "2.6.0+cu124"
}
}