ppo-Huggy2 / run_logs / training_status.json
{
    "Huggy": {
        "checkpoints": [
            {
                "steps": 199998,
                "file_path": "results/Huggy/Huggy/Huggy-199998.onnx",
                "reward": 3.516002776907451,
                "creation_time": 1679386891.3497944,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-199998.pt"
                ]
            },
            {
                "steps": 399871,
                "file_path": "results/Huggy/Huggy/Huggy-399871.onnx",
                "reward": 4.001079869089705,
                "creation_time": 1679387137.1586773,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-399871.pt"
                ]
            },
            {
                "steps": 599911,
                "file_path": "results/Huggy/Huggy/Huggy-599911.onnx",
                "reward": 3.9673686686315035,
                "creation_time": 1679387385.255741,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-599911.pt"
                ]
            },
            {
                "steps": 799928,
                "file_path": "results/Huggy/Huggy/Huggy-799928.onnx",
                "reward": 3.642048556071061,
                "creation_time": 1679387631.1222007,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-799928.pt"
                ]
            },
            {
                "steps": 999974,
                "file_path": "results/Huggy/Huggy/Huggy-999974.onnx",
                "reward": 3.8495456802268184,
                "creation_time": 1679387884.8071315,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-999974.pt"
                ]
            },
            {
                "steps": 1199977,
                "file_path": "results/Huggy/Huggy/Huggy-1199977.onnx",
                "reward": 3.8499425848325095,
                "creation_time": 1679388135.0700088,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-1199977.pt"
                ]
            },
            {
                "steps": 1399934,
                "file_path": "results/Huggy/Huggy/Huggy-1399934.onnx",
                "reward": 3.787082631020777,
                "creation_time": 1679388380.1893806,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-1399934.pt"
                ]
            },
            {
                "steps": 1599945,
                "file_path": "results/Huggy/Huggy/Huggy-1599945.onnx",
                "reward": 3.9733718483037848,
                "creation_time": 1679388630.4783528,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-1599945.pt"
                ]
            },
            {
                "steps": 1799943,
                "file_path": "results/Huggy/Huggy/Huggy-1799943.onnx",
                "reward": 3.853925788739942,
                "creation_time": 1679388884.3387744,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-1799943.pt"
                ]
            },
            {
                "steps": 1999895,
                "file_path": "results/Huggy/Huggy/Huggy-1999895.onnx",
                "reward": 3.945011840926276,
                "creation_time": 1679389132.9496098,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-1999895.pt"
                ]
            },
            {
                "steps": 2000018,
                "file_path": "results/Huggy/Huggy/Huggy-2000018.onnx",
                "reward": 4.009646241153989,
                "creation_time": 1679389134.0431712,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-2000018.pt"
                ]
            }
        ],
        "final_checkpoint": {
            "steps": 2000018,
            "file_path": "results/Huggy/Huggy.onnx",
            "reward": 4.009646241153989,
            "creation_time": 1679389134.0431712,
            "auxillary_file_paths": [
                "results/Huggy/Huggy/Huggy-2000018.pt"
            ]
        }
    },
    "metadata": {
        "stats_format_version": "0.3.0",
        "mlagents_version": "0.31.0.dev0",
        "torch_version": "1.11.0+cu102"
    }
}
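
This training_status.json is the run log ML-Agents writes next to the trained model: each entry in "checkpoints" records the step count, the exported .onnx policy path, the mean reward at export time, a Unix creation timestamp, and the companion PyTorch checkpoint under "auxillary_file_paths" (the misspelling is the key ML-Agents actually emits). Below is a minimal sketch of loading the file and summarizing the best and final checkpoints; the file path and the "Huggy" behavior key come from the listing above, while everything else (the script itself, the relative working directory) is assumed for illustration.

    import json
    from datetime import datetime, timezone

    # Load the run log shown above (path assumed relative to the repo root).
    with open("run_logs/training_status.json") as f:
        status = json.load(f)

    huggy = status["Huggy"]

    # Pick the checkpoint with the highest mean reward, and grab the final one.
    best = max(huggy["checkpoints"], key=lambda c: c["reward"])
    final = huggy["final_checkpoint"]

    for label, ckpt in (("best", best), ("final", final)):
        # creation_time is a Unix timestamp; render it as UTC for readability.
        when = datetime.fromtimestamp(ckpt["creation_time"], tz=timezone.utc)
        print(f"{label}: {ckpt['steps']} steps, "
              f"reward {ckpt['reward']:.3f}, saved {when:%Y-%m-%d %H:%M} UTC "
              f"-> {ckpt['file_path']}")

On this run the summary would show that the final checkpoint at 2000018 steps (mean reward about 4.01) is also the highest-reward one, which is why results/Huggy/Huggy.onnx points at it.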