{
"Huggy": {
"checkpoints": [
{
"steps": 199857,
"file_path": "results/Huggy/Huggy/Huggy-199857.onnx",
"reward": 3.375037940986016,
"creation_time": 1681812512.419343,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-199857.pt"
]
},
{
"steps": 399953,
"file_path": "results/Huggy/Huggy/Huggy-399953.onnx",
"reward": 3.2181425061944413,
"creation_time": 1681812752.6153572,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-399953.pt"
]
},
{
"steps": 599938,
"file_path": "results/Huggy/Huggy/Huggy-599938.onnx",
"reward": 3.620081522248008,
"creation_time": 1681812984.0835917,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-599938.pt"
]
},
{
"steps": 799989,
"file_path": "results/Huggy/Huggy/Huggy-799989.onnx",
"reward": 3.8245625664426393,
"creation_time": 1681813223.5452707,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-799989.pt"
]
},
{
"steps": 999929,
"file_path": "results/Huggy/Huggy/Huggy-999929.onnx",
"reward": 3.730108034341855,
"creation_time": 1681813457.556722,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-999929.pt"
]
},
{
"steps": 1199942,
"file_path": "results/Huggy/Huggy/Huggy-1199942.onnx",
"reward": 4.073397161913853,
"creation_time": 1681813688.2764843,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1199942.pt"
]
},
{
"steps": 1399709,
"file_path": "results/Huggy/Huggy/Huggy-1399709.onnx",
"reward": 3.765204880258133,
"creation_time": 1681813913.8875706,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1399709.pt"
]
},
{
"steps": 1599300,
"file_path": "results/Huggy/Huggy/Huggy-1599300.onnx",
"reward": 3.7454000593800294,
"creation_time": 1681814147.698589,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1599300.pt"
]
},
{
"steps": 1799674,
"file_path": "results/Huggy/Huggy/Huggy-1799674.onnx",
"reward": 3.9245371049100704,
"creation_time": 1681814384.8905332,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1799674.pt"
]
},
{
"steps": 1999935,
"file_path": "results/Huggy/Huggy/Huggy-1999935.onnx",
"reward": 4.325511813163757,
"creation_time": 1681814620.6064541,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1999935.pt"
]
},
{
"steps": 2000048,
"file_path": "results/Huggy/Huggy/Huggy-2000048.onnx",
"reward": 4.362816122600011,
"creation_time": 1681814620.7342105,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-2000048.pt"
]
}
],
"final_checkpoint": {
"steps": 2000048,
"file_path": "results/Huggy/Huggy.onnx",
"reward": 4.362816122600011,
"creation_time": 1681814620.7342105,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-2000048.pt"
]
}
},
"metadata": {
"stats_format_version": "0.3.0",
"mlagents_version": "0.31.0.dev0",
"torch_version": "1.11.0+cu102"
}
}