{
"Huggy": {
"checkpoints": [
{
"steps": 199882,
"file_path": "results/Huggy/Huggy/Huggy-199882.onnx",
"reward": 3.3982566576155406,
"creation_time": 1699004460.748323,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-199882.pt"
]
},
{
"steps": 399392,
"file_path": "results/Huggy/Huggy/Huggy-399392.onnx",
"reward": 4.066862491460947,
"creation_time": 1699004698.4287474,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-399392.pt"
]
},
{
"steps": 599887,
"file_path": "results/Huggy/Huggy/Huggy-599887.onnx",
"reward": 4.520594408637599,
"creation_time": 1699004943.7375364,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-599887.pt"
]
},
{
"steps": 799935,
"file_path": "results/Huggy/Huggy/Huggy-799935.onnx",
"reward": 3.7026110442166917,
"creation_time": 1699005195.5628638,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-799935.pt"
]
},
{
"steps": 999729,
"file_path": "results/Huggy/Huggy/Huggy-999729.onnx",
"reward": 3.6401535392768922,
"creation_time": 1699005449.501357,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-999729.pt"
]
},
{
"steps": 1199998,
"file_path": "results/Huggy/Huggy/Huggy-1199998.onnx",
"reward": 3.8705578530338447,
"creation_time": 1699005708.074153,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1199998.pt"
]
},
{
"steps": 1399746,
"file_path": "results/Huggy/Huggy/Huggy-1399746.onnx",
"reward": 3.5904228094205335,
"creation_time": 1699005959.1673095,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1399746.pt"
]
},
{
"steps": 1599970,
"file_path": "results/Huggy/Huggy/Huggy-1599970.onnx",
"reward": 3.764733587512533,
"creation_time": 1699006213.7873359,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1599970.pt"
]
},
{
"steps": 1799540,
"file_path": "results/Huggy/Huggy/Huggy-1799540.onnx",
"reward": 3.700228795811937,
"creation_time": 1699006466.9444366,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1799540.pt"
]
},
{
"steps": 1999733,
"file_path": "results/Huggy/Huggy/Huggy-1999733.onnx",
"reward": 3.8919130298826428,
"creation_time": 1699006722.9318755,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1999733.pt"
]
},
{
"steps": 2000483,
"file_path": "results/Huggy/Huggy/Huggy-2000483.onnx",
"reward": 3.627224577324731,
"creation_time": 1699006723.062416,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-2000483.pt"
]
}
],
"final_checkpoint": {
"steps": 2000483,
"file_path": "results/Huggy/Huggy.onnx",
"reward": 3.627224577324731,
"creation_time": 1699006723.062416,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-2000483.pt"
]
}
},
"metadata": {
"stats_format_version": "0.3.0",
"mlagents_version": "1.1.0.dev0",
"torch_version": "2.1.0+cu118"
}
}