{
    "Huggy": {
        "checkpoints": [
            {
                "steps": 199960,
                "file_path": "results/Huggy/Huggy/Huggy-199960.onnx",
                "reward": 3.2991333639982976,
                "creation_time": 1694871925.9594223,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-199960.pt"
                ]
            },
            {
                "steps": 399924,
                "file_path": "results/Huggy/Huggy/Huggy-399924.onnx",
                "reward": 3.768667444334192,
                "creation_time": 1694872167.1297402,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-399924.pt"
                ]
            },
            {
                "steps": 599951,
                "file_path": "results/Huggy/Huggy/Huggy-599951.onnx",
                "reward": 3.729727692074246,
                "creation_time": 1694872408.368379,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-599951.pt"
                ]
            },
            {
                "steps": 799976,
                "file_path": "results/Huggy/Huggy/Huggy-799976.onnx",
                "reward": 3.6843229833469597,
                "creation_time": 1694872650.226978,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-799976.pt"
                ]
            },
            {
                "steps": 999990,
                "file_path": "results/Huggy/Huggy/Huggy-999990.onnx",
                "reward": 3.562490618044092,
                "creation_time": 1694872894.0513027,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-999990.pt"
                ]
            },
            {
                "steps": 1199974,
                "file_path": "results/Huggy/Huggy/Huggy-1199974.onnx",
                "reward": 3.9943452288707095,
                "creation_time": 1694873147.4005651,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-1199974.pt"
                ]
            },
            {
                "steps": 1399960,
                "file_path": "results/Huggy/Huggy/Huggy-1399960.onnx",
                "reward": 3.954549390052441,
                "creation_time": 1694873396.711894,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-1399960.pt"
                ]
            },
            {
                "steps": 1599976,
                "file_path": "results/Huggy/Huggy/Huggy-1599976.onnx",
                "reward": 3.9973691335091224,
                "creation_time": 1694873651.149114,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-1599976.pt"
                ]
            },
            {
                "steps": 1799328,
                "file_path": "results/Huggy/Huggy/Huggy-1799328.onnx",
                "reward": 4.019095474968151,
                "creation_time": 1694873899.615101,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-1799328.pt"
                ]
            },
            {
                "steps": 1999965,
                "file_path": "results/Huggy/Huggy/Huggy-1999965.onnx",
                "reward": 4.322204172611237,
                "creation_time": 1694874150.8713813,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-1999965.pt"
                ]
            },
            {
                "steps": 2000018,
                "file_path": "results/Huggy/Huggy/Huggy-2000018.onnx",
                "reward": 4.24012913125934,
                "creation_time": 1694874151.0569,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-2000018.pt"
                ]
            }
        ],
        "final_checkpoint": {
            "steps": 2000018,
            "file_path": "results/Huggy/Huggy.onnx",
            "reward": 4.24012913125934,
            "creation_time": 1694874151.0569,
            "auxillary_file_paths": [
                "results/Huggy/Huggy/Huggy-2000018.pt"
            ]
        }
    },
    "metadata": {
        "stats_format_version": "0.3.0",
        "mlagents_version": "0.31.0.dev0",
        "torch_version": "1.11.0+cu102"
    }
}