{
"Huggy": {
"checkpoints": [
{
"steps": 199876,
"file_path": "results/Huggy2/Huggy/Huggy-199876.onnx",
"reward": 3.3409484227498374,
"creation_time": 1736163991.5988257,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-199876.pt"
]
},
{
"steps": 399967,
"file_path": "results/Huggy2/Huggy/Huggy-399967.onnx",
"reward": 3.9672607476749118,
"creation_time": 1736164234.142965,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-399967.pt"
]
},
{
"steps": 599857,
"file_path": "results/Huggy2/Huggy/Huggy-599857.onnx",
"reward": 3.711597337442286,
"creation_time": 1736164482.8131902,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-599857.pt"
]
},
{
"steps": 799965,
"file_path": "results/Huggy2/Huggy/Huggy-799965.onnx",
"reward": 3.917381104381605,
"creation_time": 1736164727.2464588,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-799965.pt"
]
},
{
"steps": 999948,
"file_path": "results/Huggy2/Huggy/Huggy-999948.onnx",
"reward": 3.7294881954193113,
"creation_time": 1736164973.996798,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-999948.pt"
]
},
{
"steps": 1199956,
"file_path": "results/Huggy2/Huggy/Huggy-1199956.onnx",
"reward": 4.131281704738222,
"creation_time": 1736165219.1498365,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1199956.pt"
]
},
{
"steps": 1399947,
"file_path": "results/Huggy2/Huggy/Huggy-1399947.onnx",
"reward": 3.89090888053885,
"creation_time": 1736165461.3062475,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1399947.pt"
]
},
{
"steps": 1599982,
"file_path": "results/Huggy2/Huggy/Huggy-1599982.onnx",
"reward": 3.6917679396367844,
"creation_time": 1736165713.4378712,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1599982.pt"
]
},
{
"steps": 1799957,
"file_path": "results/Huggy2/Huggy/Huggy-1799957.onnx",
"reward": 4.007721153932197,
"creation_time": 1736165962.598678,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1799957.pt"
]
},
{
"steps": 1999901,
"file_path": "results/Huggy2/Huggy/Huggy-1999901.onnx",
"reward": 4.3848587572574615,
"creation_time": 1736166212.0210867,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1999901.pt"
]
},
{
"steps": 2000004,
"file_path": "results/Huggy2/Huggy/Huggy-2000004.onnx",
"reward": 4.394826375204941,
"creation_time": 1736166212.134684,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-2000004.pt"
]
}
],
"final_checkpoint": {
"steps": 2000004,
"file_path": "results/Huggy2/Huggy.onnx",
"reward": 4.394826375204941,
"creation_time": 1736166212.134684,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-2000004.pt"
]
}
},
"metadata": {
"stats_format_version": "0.3.0",
"mlagents_version": "1.2.0.dev0",
"torch_version": "2.5.1+cu121"
}
}