{
"Huggy": {
"checkpoints": [
{
"steps": 199927,
"file_path": "results/Huggy/Huggy/Huggy-199927.onnx",
"reward": 3.3465747517697952,
"creation_time": 1671235549.289598,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-199927.pt"
]
},
{
"steps": 399960,
"file_path": "results/Huggy/Huggy/Huggy-399960.onnx",
"reward": 3.587508340945115,
"creation_time": 1671235779.8835502,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-399960.pt"
]
},
{
"steps": 599996,
"file_path": "results/Huggy/Huggy/Huggy-599996.onnx",
"reward": 3.7647466233798434,
"creation_time": 1671236014.9122186,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-599996.pt"
]
},
{
"steps": 799898,
"file_path": "results/Huggy/Huggy/Huggy-799898.onnx",
"reward": 3.8936769769649313,
"creation_time": 1671236249.0540893,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-799898.pt"
]
},
{
"steps": 999985,
"file_path": "results/Huggy/Huggy/Huggy-999985.onnx",
"reward": 3.5728278455877662,
"creation_time": 1671236485.5710585,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-999985.pt"
]
},
{
"steps": 1199985,
"file_path": "results/Huggy/Huggy/Huggy-1199985.onnx",
"reward": 3.6523202279257396,
"creation_time": 1671236721.2229831,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1199985.pt"
]
},
{
"steps": 1399966,
"file_path": "results/Huggy/Huggy/Huggy-1399966.onnx",
"reward": null,
"creation_time": 1671236957.7779539,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1399966.pt"
]
},
{
"steps": 1599796,
"file_path": "results/Huggy/Huggy/Huggy-1599796.onnx",
"reward": 3.6832311313599346,
"creation_time": 1671237193.9341614,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1599796.pt"
]
},
{
"steps": 1799909,
"file_path": "results/Huggy/Huggy/Huggy-1799909.onnx",
"reward": 3.6794941354057062,
"creation_time": 1671237431.5017326,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1799909.pt"
]
},
{
"steps": 1999975,
"file_path": "results/Huggy/Huggy/Huggy-1999975.onnx",
"reward": 3.376985788345337,
"creation_time": 1671237665.8099134,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1999975.pt"
]
},
{
"steps": 2000037,
"file_path": "results/Huggy/Huggy/Huggy-2000037.onnx",
"reward": 3.4545950387653552,
"creation_time": 1671237665.9326818,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-2000037.pt"
]
}
],
"final_checkpoint": {
"steps": 2000037,
"file_path": "results/Huggy/Huggy.onnx",
"reward": 3.4545950387653552,
"creation_time": 1671237665.9326818,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-2000037.pt"
]
}
},
"metadata": {
"stats_format_version": "0.3.0",
"mlagents_version": "0.29.0.dev0",
"torch_version": "1.8.1+cu102"
}
}