{
"Huggy": {
"checkpoints": [
{
"steps": 199924,
"file_path": "results/Huggy/Huggy/Huggy-199924.onnx",
"reward": 3.428200879404622,
"creation_time": 1695325254.5249803,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-199924.pt"
]
},
{
"steps": 399919,
"file_path": "results/Huggy/Huggy/Huggy-399919.onnx",
"reward": 3.6582242873998787,
"creation_time": 1695325500.294081,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-399919.pt"
]
},
{
"steps": 599995,
"file_path": "results/Huggy/Huggy/Huggy-599995.onnx",
"reward": 4.0606852521498995,
"creation_time": 1695325751.8437681,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-599995.pt"
]
},
{
"steps": 799982,
"file_path": "results/Huggy/Huggy/Huggy-799982.onnx",
"reward": 3.9055897749440613,
"creation_time": 1695326001.486691,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-799982.pt"
]
},
{
"steps": 999869,
"file_path": "results/Huggy/Huggy/Huggy-999869.onnx",
"reward": 3.8312048428463488,
"creation_time": 1695326255.9172254,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-999869.pt"
]
},
{
"steps": 1199983,
"file_path": "results/Huggy/Huggy/Huggy-1199983.onnx",
"reward": 3.339148203531901,
"creation_time": 1695326512.5407119,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1199983.pt"
]
},
{
"steps": 1399889,
"file_path": "results/Huggy/Huggy/Huggy-1399889.onnx",
"reward": 3.8210459900675,
"creation_time": 1695326764.677841,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1399889.pt"
]
},
{
"steps": 1599649,
"file_path": "results/Huggy/Huggy/Huggy-1599649.onnx",
"reward": 3.901474042049307,
"creation_time": 1695327022.679234,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1599649.pt"
]
},
{
"steps": 1799956,
"file_path": "results/Huggy/Huggy/Huggy-1799956.onnx",
"reward": 3.8017283095554872,
"creation_time": 1695327282.3109071,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1799956.pt"
]
},
{
"steps": 1999977,
"file_path": "results/Huggy/Huggy/Huggy-1999977.onnx",
"reward": 4.268156386338747,
"creation_time": 1695327537.4578807,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1999977.pt"
]
},
{
"steps": 2000077,
"file_path": "results/Huggy/Huggy/Huggy-2000077.onnx",
"reward": 4.275287189653942,
"creation_time": 1695327537.5842607,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-2000077.pt"
]
}
],
"final_checkpoint": {
"steps": 2000077,
"file_path": "results/Huggy/Huggy.onnx",
"reward": 4.275287189653942,
"creation_time": 1695327537.5842607,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-2000077.pt"
]
}
},
"metadata": {
"stats_format_version": "0.3.0",
"mlagents_version": "0.31.0.dev0",
"torch_version": "1.11.0+cu102"
}
}