{
"Huggy": {
"checkpoints": [
{
"steps": 199565,
"file_path": "results/Huggy/Huggy/Huggy-199565.onnx",
"reward": 3.2410511355246268,
"creation_time": 1677069060.9222677,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-199565.pt"
]
},
{
"steps": 399947,
"file_path": "results/Huggy/Huggy/Huggy-399947.onnx",
"reward": 4.049909230262514,
"creation_time": 1677069296.7093797,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-399947.pt"
]
},
{
"steps": 599995,
"file_path": "results/Huggy/Huggy/Huggy-599995.onnx",
"reward": 4.0757691209966485,
"creation_time": 1677069538.3067517,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-599995.pt"
]
},
{
"steps": 799924,
"file_path": "results/Huggy/Huggy/Huggy-799924.onnx",
"reward": 3.7611293122172356,
"creation_time": 1677069775.937952,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-799924.pt"
]
},
{
"steps": 999909,
"file_path": "results/Huggy/Huggy/Huggy-999909.onnx",
"reward": 3.7261901364397647,
"creation_time": 1677070015.2928002,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-999909.pt"
]
},
{
"steps": 1199999,
"file_path": "results/Huggy/Huggy/Huggy-1199999.onnx",
"reward": 3.852706298075224,
"creation_time": 1677070257.6429884,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1199999.pt"
]
},
{
"steps": 1399929,
"file_path": "results/Huggy/Huggy/Huggy-1399929.onnx",
"reward": 4.140577360987663,
"creation_time": 1677070498.5312011,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1399929.pt"
]
},
{
"steps": 1599974,
"file_path": "results/Huggy/Huggy/Huggy-1599974.onnx",
"reward": 4.076476266728112,
"creation_time": 1677070737.539922,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1599974.pt"
]
},
{
"steps": 1799987,
"file_path": "results/Huggy/Huggy/Huggy-1799987.onnx",
"reward": 4.278368833636449,
"creation_time": 1677070981.21614,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1799987.pt"
]
},
{
"steps": 1999996,
"file_path": "results/Huggy/Huggy/Huggy-1999996.onnx",
"reward": 4.198466141407306,
"creation_time": 1677071223.4863915,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1999996.pt"
]
},
{
"steps": 2000073,
"file_path": "results/Huggy/Huggy/Huggy-2000073.onnx",
"reward": 4.225906149907545,
"creation_time": 1677071223.668134,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-2000073.pt"
]
}
],
"final_checkpoint": {
"steps": 2000073,
"file_path": "results/Huggy/Huggy.onnx",
"reward": 4.225906149907545,
"creation_time": 1677071223.668134,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-2000073.pt"
]
}
},
"metadata": {
"stats_format_version": "0.3.0",
"mlagents_version": "0.29.0.dev0",
"torch_version": "1.8.1+cu102"
}
}