{
"Huggy": {
"checkpoints": [
{
"steps": 199954,
"file_path": "results/Huggy/Huggy/Huggy-199954.onnx",
"reward": 3.68564664040293,
"creation_time": 1694257364.819234,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-199954.pt"
]
},
{
"steps": 399928,
"file_path": "results/Huggy/Huggy/Huggy-399928.onnx",
"reward": 4.00441423827602,
"creation_time": 1694257606.1672924,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-399928.pt"
]
},
{
"steps": 599895,
"file_path": "results/Huggy/Huggy/Huggy-599895.onnx",
"reward": 4.322712289435523,
"creation_time": 1694257849.3220706,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-599895.pt"
]
},
{
"steps": 799942,
"file_path": "results/Huggy/Huggy/Huggy-799942.onnx",
"reward": 3.904157200540815,
"creation_time": 1694258095.2857296,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-799942.pt"
]
},
{
"steps": 999961,
"file_path": "results/Huggy/Huggy/Huggy-999961.onnx",
"reward": 3.8093985723915385,
"creation_time": 1694258353.7025526,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-999961.pt"
]
},
{
"steps": 1199945,
"file_path": "results/Huggy/Huggy/Huggy-1199945.onnx",
"reward": 3.9988284819368003,
"creation_time": 1694258612.6874936,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1199945.pt"
]
},
{
"steps": 1399935,
"file_path": "results/Huggy/Huggy/Huggy-1399935.onnx",
"reward": 3.867561220888998,
"creation_time": 1694258865.7332997,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1399935.pt"
]
},
{
"steps": 1599949,
"file_path": "results/Huggy/Huggy/Huggy-1599949.onnx",
"reward": 3.89179527759552,
"creation_time": 1694259121.49521,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1599949.pt"
]
},
{
"steps": 1799940,
"file_path": "results/Huggy/Huggy/Huggy-1799940.onnx",
"reward": 3.589960749101157,
"creation_time": 1694259376.1952653,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1799940.pt"
]
},
{
"steps": 1999987,
"file_path": "results/Huggy/Huggy/Huggy-1999987.onnx",
"reward": 3.8504336756818436,
"creation_time": 1694259625.6927917,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1999987.pt"
]
},
{
"steps": 2000143,
"file_path": "results/Huggy/Huggy/Huggy-2000143.onnx",
"reward": 3.8611771753856114,
"creation_time": 1694259625.8225462,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-2000143.pt"
]
}
],
"final_checkpoint": {
"steps": 2000143,
"file_path": "results/Huggy/Huggy.onnx",
"reward": 3.8611771753856114,
"creation_time": 1694259625.8225462,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-2000143.pt"
]
}
},
"metadata": {
"stats_format_version": "0.3.0",
"mlagents_version": "0.31.0.dev0",
"torch_version": "1.11.0+cu102"
}
}