{
"Huggy": {
"checkpoints": [
{
"steps": 199752,
"file_path": "results/Huggy/Huggy/Huggy-199752.onnx",
"reward": 3.384264365091162,
"creation_time": 1691339213.7559648,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-199752.pt"
]
},
{
"steps": 399770,
"file_path": "results/Huggy/Huggy/Huggy-399770.onnx",
"reward": 3.6491850144935376,
"creation_time": 1691339472.185223,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-399770.pt"
]
},
{
"steps": 599878,
"file_path": "results/Huggy/Huggy/Huggy-599878.onnx",
"reward": 4.017877765323805,
"creation_time": 1691339730.059523,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-599878.pt"
]
},
{
"steps": 799968,
"file_path": "results/Huggy/Huggy/Huggy-799968.onnx",
"reward": 3.8644787856702054,
"creation_time": 1691339983.5450342,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-799968.pt"
]
},
{
"steps": 999934,
"file_path": "results/Huggy/Huggy/Huggy-999934.onnx",
"reward": 3.930796653032303,
"creation_time": 1691340245.1772575,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-999934.pt"
]
},
{
"steps": 1199945,
"file_path": "results/Huggy/Huggy/Huggy-1199945.onnx",
"reward": 4.112244291885479,
"creation_time": 1691340505.5399773,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1199945.pt"
]
},
{
"steps": 1399515,
"file_path": "results/Huggy/Huggy/Huggy-1399515.onnx",
"reward": 4.964050507545471,
"creation_time": 1691340770.2312052,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1399515.pt"
]
},
{
"steps": 1599891,
"file_path": "results/Huggy/Huggy/Huggy-1599891.onnx",
"reward": 3.649055480030534,
"creation_time": 1691341033.3241627,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1599891.pt"
]
},
{
"steps": 1799940,
"file_path": "results/Huggy/Huggy/Huggy-1799940.onnx",
"reward": 4.163141065522244,
"creation_time": 1691341301.2738886,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1799940.pt"
]
},
{
"steps": 1999931,
"file_path": "results/Huggy/Huggy/Huggy-1999931.onnx",
"reward": 4.199622951055828,
"creation_time": 1691341560.0790877,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1999931.pt"
]
},
{
"steps": 2000002,
"file_path": "results/Huggy/Huggy/Huggy-2000002.onnx",
"reward": 4.129377343715766,
"creation_time": 1691341560.2873302,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-2000002.pt"
]
}
],
"final_checkpoint": {
"steps": 2000002,
"file_path": "results/Huggy/Huggy.onnx",
"reward": 4.129377343715766,
"creation_time": 1691341560.2873302,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-2000002.pt"
]
}
},
"metadata": {
"stats_format_version": "0.3.0",
"mlagents_version": "0.31.0.dev0",
"torch_version": "1.11.0+cu102"
}
}