{
"Huggy": {
"checkpoints": [
{
"steps": 199850,
"file_path": "results/Huggy/Huggy/Huggy-199850.onnx",
"reward": 3.375360616107485,
"creation_time": 1672038763.037155,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-199850.pt"
]
},
{
"steps": 399933,
"file_path": "results/Huggy/Huggy/Huggy-399933.onnx",
"reward": 3.604273084231785,
"creation_time": 1672038995.7988734,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-399933.pt"
]
},
{
"steps": 599940,
"file_path": "results/Huggy/Huggy/Huggy-599940.onnx",
"reward": 3.7428865227206,
"creation_time": 1672039233.8554168,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-599940.pt"
]
},
{
"steps": 799988,
"file_path": "results/Huggy/Huggy/Huggy-799988.onnx",
"reward": 3.8502871905054366,
"creation_time": 1672039466.9458356,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-799988.pt"
]
},
{
"steps": 999582,
"file_path": "results/Huggy/Huggy/Huggy-999582.onnx",
"reward": 3.8756543789591107,
"creation_time": 1672039703.8662312,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-999582.pt"
]
},
{
"steps": 1199974,
"file_path": "results/Huggy/Huggy/Huggy-1199974.onnx",
"reward": 3.9265168758287823,
"creation_time": 1672039941.0310206,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1199974.pt"
]
},
{
"steps": 1399987,
"file_path": "results/Huggy/Huggy/Huggy-1399987.onnx",
"reward": 4.378022464838895,
"creation_time": 1672040178.2576876,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1399987.pt"
]
},
{
"steps": 1599563,
"file_path": "results/Huggy/Huggy/Huggy-1599563.onnx",
"reward": 3.9663967262614857,
"creation_time": 1672040410.7795713,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1599563.pt"
]
},
{
"steps": 1799996,
"file_path": "results/Huggy/Huggy/Huggy-1799996.onnx",
"reward": 3.7359127564863726,
"creation_time": 1672040646.8872993,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1799996.pt"
]
},
{
"steps": 1999961,
"file_path": "results/Huggy/Huggy/Huggy-1999961.onnx",
"reward": 3.546050171852112,
"creation_time": 1672040881.2619207,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1999961.pt"
]
},
{
"steps": 2000126,
"file_path": "results/Huggy/Huggy/Huggy-2000126.onnx",
"reward": 3.6529820561408997,
"creation_time": 1672040881.3931465,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-2000126.pt"
]
}
],
"final_checkpoint": {
"steps": 2000126,
"file_path": "results/Huggy/Huggy.onnx",
"reward": 3.6529820561408997,
"creation_time": 1672040881.3931465,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-2000126.pt"
]
}
},
"metadata": {
"stats_format_version": "0.3.0",
"mlagents_version": "0.29.0.dev0",
"torch_version": "1.8.1+cu102"
}
}