{
"Huggy": {
"checkpoints": [
{
"steps": 199874,
"file_path": "results/Huggy2/Huggy/Huggy-199874.onnx",
"reward": 3.043856784982501,
"creation_time": 1739163937.6344414,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-199874.pt"
]
},
{
"steps": 399891,
"file_path": "results/Huggy2/Huggy/Huggy-399891.onnx",
"reward": 3.468922587002025,
"creation_time": 1739164344.7356296,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-399891.pt"
]
},
{
"steps": 599962,
"file_path": "results/Huggy2/Huggy/Huggy-599962.onnx",
"reward": 2.2807931780815123,
"creation_time": 1739164755.9356558,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-599962.pt"
]
},
{
"steps": 799960,
"file_path": "results/Huggy2/Huggy/Huggy-799960.onnx",
"reward": 3.5227458426585563,
"creation_time": 1739165149.8615558,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-799960.pt"
]
},
{
"steps": 999917,
"file_path": "results/Huggy2/Huggy/Huggy-999917.onnx",
"reward": 3.4728261693831413,
"creation_time": 1739165555.305393,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-999917.pt"
]
},
{
"steps": 1199931,
"file_path": "results/Huggy2/Huggy/Huggy-1199931.onnx",
"reward": 3.6179328981567833,
"creation_time": 1739165961.1971707,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1199931.pt"
]
},
{
"steps": 1399989,
"file_path": "results/Huggy2/Huggy/Huggy-1399989.onnx",
"reward": 3.4152319072044057,
"creation_time": 1739166357.6547816,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1399989.pt"
]
},
{
"steps": 1599452,
"file_path": "results/Huggy2/Huggy/Huggy-1599452.onnx",
"reward": 3.3768049247984617,
"creation_time": 1739166767.0375915,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1599452.pt"
]
},
{
"steps": 1799968,
"file_path": "results/Huggy2/Huggy/Huggy-1799968.onnx",
"reward": 3.9733174883801006,
"creation_time": 1739167189.25055,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1799968.pt"
]
},
{
"steps": 1999415,
"file_path": "results/Huggy2/Huggy/Huggy-1999415.onnx",
"reward": 3.6457270918665707,
"creation_time": 1739167591.0163524,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1999415.pt"
]
},
{
"steps": 2000165,
"file_path": "results/Huggy2/Huggy/Huggy-2000165.onnx",
"reward": 3.6099473853265085,
"creation_time": 1739167591.1653752,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-2000165.pt"
]
}
],
"final_checkpoint": {
"steps": 2000165,
"file_path": "results/Huggy2/Huggy.onnx",
"reward": 3.6099473853265085,
"creation_time": 1739167591.1653752,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-2000165.pt"
]
}
},
"metadata": {
"stats_format_version": "0.3.0",
"mlagents_version": "1.2.0.dev0",
"torch_version": "2.6.0+cu124"
}
}