{
"Huggy": {
"checkpoints": [
{
"steps": 199747,
"file_path": "results/Huggy/Huggy/Huggy-199747.onnx",
"reward": 3.5014740347862245,
"creation_time": 1685527712.2734184,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-199747.pt"
]
},
{
"steps": 399989,
"file_path": "results/Huggy/Huggy/Huggy-399989.onnx",
"reward": 3.974085302495245,
"creation_time": 1685527936.9585311,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-399989.pt"
]
},
{
"steps": 599966,
"file_path": "results/Huggy/Huggy/Huggy-599966.onnx",
"reward": 3.710236606853349,
"creation_time": 1685528169.8676417,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-599966.pt"
]
},
{
"steps": 799999,
"file_path": "results/Huggy/Huggy/Huggy-799999.onnx",
"reward": 3.6526134160848764,
"creation_time": 1685528397.6446915,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-799999.pt"
]
},
{
"steps": 999978,
"file_path": "results/Huggy/Huggy/Huggy-999978.onnx",
"reward": 3.725884562333425,
"creation_time": 1685528630.9963589,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-999978.pt"
]
},
{
"steps": 1199997,
"file_path": "results/Huggy/Huggy/Huggy-1199997.onnx",
"reward": 3.9104337055574763,
"creation_time": 1685528864.51658,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1199997.pt"
]
},
{
"steps": 1399974,
"file_path": "results/Huggy/Huggy/Huggy-1399974.onnx",
"reward": 3.7658301472663878,
"creation_time": 1685529099.9895134,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1399974.pt"
]
},
{
"steps": 1599950,
"file_path": "results/Huggy/Huggy/Huggy-1599950.onnx",
"reward": 3.987352361375431,
"creation_time": 1685529327.1366687,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1599950.pt"
]
},
{
"steps": 1799916,
"file_path": "results/Huggy/Huggy/Huggy-1799916.onnx",
"reward": 3.9247028848508023,
"creation_time": 1685529558.8889556,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1799916.pt"
]
},
{
"steps": 1999962,
"file_path": "results/Huggy/Huggy/Huggy-1999962.onnx",
"reward": 4.166998260641751,
"creation_time": 1685529794.479971,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1999962.pt"
]
},
{
"steps": 2000007,
"file_path": "results/Huggy/Huggy/Huggy-2000007.onnx",
"reward": 4.159590410219656,
"creation_time": 1685529794.600326,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-2000007.pt"
]
}
],
"final_checkpoint": {
"steps": 2000007,
"file_path": "results/Huggy/Huggy.onnx",
"reward": 4.159590410219656,
"creation_time": 1685529794.600326,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-2000007.pt"
]
}
},
"metadata": {
"stats_format_version": "0.3.0",
"mlagents_version": "0.31.0.dev0",
"torch_version": "1.11.0+cu102"
}
}