{
"Huggy": {
"checkpoints": [
{
"steps": 199953,
"file_path": "results/Huggy2/Huggy/Huggy-199953.onnx",
"reward": 3.4816637568070856,
"creation_time": 1716290814.3184013,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-199953.pt"
]
},
{
"steps": 399943,
"file_path": "results/Huggy2/Huggy/Huggy-399943.onnx",
"reward": 3.432319795233863,
"creation_time": 1716291042.3390515,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-399943.pt"
]
},
{
"steps": 599938,
"file_path": "results/Huggy2/Huggy/Huggy-599938.onnx",
"reward": 3.242902921004729,
"creation_time": 1716291279.2758663,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-599938.pt"
]
},
{
"steps": 799991,
"file_path": "results/Huggy2/Huggy/Huggy-799991.onnx",
"reward": 3.836074968179067,
"creation_time": 1716291515.661688,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-799991.pt"
]
},
{
"steps": 999979,
"file_path": "results/Huggy2/Huggy/Huggy-999979.onnx",
"reward": 3.527172193995544,
"creation_time": 1716291750.6587856,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-999979.pt"
]
},
{
"steps": 1199937,
"file_path": "results/Huggy2/Huggy/Huggy-1199937.onnx",
"reward": 4.033243893384934,
"creation_time": 1716291989.815051,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1199937.pt"
]
},
{
"steps": 1399983,
"file_path": "results/Huggy2/Huggy/Huggy-1399983.onnx",
"reward": 3.876838921991169,
"creation_time": 1716292221.3177876,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1399983.pt"
]
},
{
"steps": 1599959,
"file_path": "results/Huggy2/Huggy/Huggy-1599959.onnx",
"reward": 3.7645304148847405,
"creation_time": 1716292465.8619492,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1599959.pt"
]
},
{
"steps": 1799971,
"file_path": "results/Huggy2/Huggy/Huggy-1799971.onnx",
"reward": 3.6358894857008064,
"creation_time": 1716292715.5757043,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1799971.pt"
]
},
{
"steps": 1999966,
"file_path": "results/Huggy2/Huggy/Huggy-1999966.onnx",
"reward": 3.7335299253463745,
"creation_time": 1716292958.332333,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1999966.pt"
]
},
{
"steps": 2000088,
"file_path": "results/Huggy2/Huggy/Huggy-2000088.onnx",
"reward": 4.023257277228615,
"creation_time": 1716292958.5175753,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-2000088.pt"
]
}
],
"final_checkpoint": {
"steps": 2000088,
"file_path": "results/Huggy2/Huggy.onnx",
"reward": 4.023257277228615,
"creation_time": 1716292958.5175753,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-2000088.pt"
]
}
},
"metadata": {
"stats_format_version": "0.3.0",
"mlagents_version": "1.1.0.dev0",
"torch_version": "2.2.1+cu121"
}
}