{
"Huggy": {
"checkpoints": [
{
"steps": 199935,
"file_path": "results/Huggy/Huggy/Huggy-199935.onnx",
"reward": 3.583973206579685,
"creation_time": 1677310382.7908947,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-199935.pt"
]
},
{
"steps": 399704,
"file_path": "results/Huggy/Huggy/Huggy-399704.onnx",
"reward": 3.5897756062055888,
"creation_time": 1677310650.7205553,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-399704.pt"
]
},
{
"steps": 599901,
"file_path": "results/Huggy/Huggy/Huggy-599901.onnx",
"reward": 3.458273771931143,
"creation_time": 1677310917.9408052,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-599901.pt"
]
},
{
"steps": 799990,
"file_path": "results/Huggy/Huggy/Huggy-799990.onnx",
"reward": 3.8354792627529126,
"creation_time": 1677311182.5564823,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-799990.pt"
]
},
{
"steps": 999851,
"file_path": "results/Huggy/Huggy/Huggy-999851.onnx",
"reward": 3.86460993917925,
"creation_time": 1677311449.758386,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-999851.pt"
]
},
{
"steps": 1199940,
"file_path": "results/Huggy/Huggy/Huggy-1199940.onnx",
"reward": 4.13003698679117,
"creation_time": 1677311717.6871896,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1199940.pt"
]
},
{
"steps": 1399778,
"file_path": "results/Huggy/Huggy/Huggy-1399778.onnx",
"reward": 3.7933087022258687,
"creation_time": 1677311979.9712222,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1399778.pt"
]
},
{
"steps": 1599999,
"file_path": "results/Huggy/Huggy/Huggy-1599999.onnx",
"reward": 3.7216608028258045,
"creation_time": 1677312252.0679307,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1599999.pt"
]
},
{
"steps": 1799991,
"file_path": "results/Huggy/Huggy/Huggy-1799991.onnx",
"reward": 3.8816597064336142,
"creation_time": 1677312518.9668562,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1799991.pt"
]
},
{
"steps": 1999998,
"file_path": "results/Huggy/Huggy/Huggy-1999998.onnx",
"reward": 4.244248999489678,
"creation_time": 1677312792.7176952,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1999998.pt"
]
},
{
"steps": 2000026,
"file_path": "results/Huggy/Huggy/Huggy-2000026.onnx",
"reward": 4.147207260131836,
"creation_time": 1677312792.8385751,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-2000026.pt"
]
}
],
"final_checkpoint": {
"steps": 2000026,
"file_path": "results/Huggy/Huggy.onnx",
"reward": 4.147207260131836,
"creation_time": 1677312792.8385751,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-2000026.pt"
]
}
},
"metadata": {
"stats_format_version": "0.3.0",
"mlagents_version": "0.29.0.dev0",
"torch_version": "1.8.1+cu102"
}
}