{
    "Huggy": {
        "checkpoints": [
            {
                "steps": 199977,
                "file_path": "results/Huggy/Huggy/Huggy-199977.onnx",
                "reward": 3.5056398122206978,
                "creation_time": 1673614657.6651564,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-199977.pt"
                ]
            },
            {
                "steps": 399916,
                "file_path": "results/Huggy/Huggy/Huggy-399916.onnx",
                "reward": 3.5594681941545927,
                "creation_time": 1673614923.85521,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-399916.pt"
                ]
            },
            {
                "steps": 599939,
                "file_path": "results/Huggy/Huggy/Huggy-599939.onnx",
                "reward": 4.875126779079437,
                "creation_time": 1673615193.8139038,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-599939.pt"
                ]
            },
            {
                "steps": 799865,
                "file_path": "results/Huggy/Huggy/Huggy-799865.onnx",
                "reward": 3.9665697085229974,
                "creation_time": 1673615456.2462986,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-799865.pt"
                ]
            },
            {
                "steps": 999969,
                "file_path": "results/Huggy/Huggy/Huggy-999969.onnx",
                "reward": 4.072159438574014,
                "creation_time": 1673615725.471059,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-999969.pt"
                ]
            },
            {
                "steps": 1199934,
                "file_path": "results/Huggy/Huggy/Huggy-1199934.onnx",
                "reward": 3.4800552066528434,
                "creation_time": 1673615993.1546626,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-1199934.pt"
                ]
            },
            {
                "steps": 1399979,
                "file_path": "results/Huggy/Huggy/Huggy-1399979.onnx",
                "reward": 4.0808023817626085,
                "creation_time": 1673616260.0746121,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-1399979.pt"
                ]
            },
            {
                "steps": 1599958,
                "file_path": "results/Huggy/Huggy/Huggy-1599958.onnx",
                "reward": 3.844497012895542,
                "creation_time": 1673616535.7480855,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-1599958.pt"
                ]
            },
            {
                "steps": 1799988,
                "file_path": "results/Huggy/Huggy/Huggy-1799988.onnx",
                "reward": 4.168201719011579,
                "creation_time": 1673616805.2045038,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-1799988.pt"
                ]
            },
            {
                "steps": 1999978,
                "file_path": "results/Huggy/Huggy/Huggy-1999978.onnx",
                "reward": 4.015026225285097,
                "creation_time": 1673617078.4134939,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-1999978.pt"
                ]
            },
            {
                "steps": 2000088,
                "file_path": "results/Huggy/Huggy/Huggy-2000088.onnx",
                "reward": 4.0525958087709215,
                "creation_time": 1673617078.553433,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-2000088.pt"
                ]
            }
        ],
        "final_checkpoint": {
            "steps": 2000088,
            "file_path": "results/Huggy/Huggy.onnx",
            "reward": 4.0525958087709215,
            "creation_time": 1673617078.553433,
            "auxillary_file_paths": [
                "results/Huggy/Huggy/Huggy-2000088.pt"
            ]
        }
    },
    "metadata": {
        "stats_format_version": "0.3.0",
        "mlagents_version": "0.29.0.dev0",
        "torch_version": "1.8.1+cu102"
    }
}