{
"Huggy": {
"checkpoints": [
{
"steps": 199729,
"file_path": "results/Huggy/Huggy/Huggy-199729.onnx",
"reward": 3.5578726090368678,
"creation_time": 1689170774.517143,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-199729.pt"
]
},
{
"steps": 399958,
"file_path": "results/Huggy/Huggy/Huggy-399958.onnx",
"reward": 3.701661121554491,
"creation_time": 1689171077.8441598,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-399958.pt"
]
},
{
"steps": 599989,
"file_path": "results/Huggy/Huggy/Huggy-599989.onnx",
"reward": 4.163847769460371,
"creation_time": 1689171387.5742395,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-599989.pt"
]
},
{
"steps": 799978,
"file_path": "results/Huggy/Huggy/Huggy-799978.onnx",
"reward": 3.9477223137227617,
"creation_time": 1689171693.1004055,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-799978.pt"
]
},
{
"steps": 999983,
"file_path": "results/Huggy/Huggy/Huggy-999983.onnx",
"reward": 4.103775192902782,
"creation_time": 1689172003.5680308,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-999983.pt"
]
},
{
"steps": 1199959,
"file_path": "results/Huggy/Huggy/Huggy-1199959.onnx",
"reward": 3.8280988720155533,
"creation_time": 1689172316.8792338,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1199959.pt"
]
},
{
"steps": 1399965,
"file_path": "results/Huggy/Huggy/Huggy-1399965.onnx",
"reward": 3.775364717021259,
"creation_time": 1689172626.0894277,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1399965.pt"
]
},
{
"steps": 1599933,
"file_path": "results/Huggy/Huggy/Huggy-1599933.onnx",
"reward": 3.704885561525086,
"creation_time": 1689172939.7505698,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1599933.pt"
]
},
{
"steps": 1799968,
"file_path": "results/Huggy/Huggy/Huggy-1799968.onnx",
"reward": 3.7859173481722914,
"creation_time": 1689173247.381962,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1799968.pt"
]
},
{
"steps": 1999990,
"file_path": "results/Huggy/Huggy/Huggy-1999990.onnx",
"reward": 3.8608374247948327,
"creation_time": 1689173557.1757643,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1999990.pt"
]
},
{
"steps": 2000086,
"file_path": "results/Huggy/Huggy/Huggy-2000086.onnx",
"reward": 3.8904155778884886,
"creation_time": 1689173557.3213437,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-2000086.pt"
]
}
],
"final_checkpoint": {
"steps": 2000086,
"file_path": "results/Huggy/Huggy.onnx",
"reward": 3.8904155778884886,
"creation_time": 1689173557.3213437,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-2000086.pt"
]
}
},
"metadata": {
"stats_format_version": "0.3.0",
"mlagents_version": "0.31.0.dev0",
"torch_version": "1.11.0+cu102"
}
}