{
"Huggy": {
"checkpoints": [
{
"steps": 199771,
"file_path": "results/Huggy/Huggy/Huggy-199771.onnx",
"reward": 3.5365383459971502,
"creation_time": 1682373056.080845,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-199771.pt"
]
},
{
"steps": 399990,
"file_path": "results/Huggy/Huggy/Huggy-399990.onnx",
"reward": 3.9288962379334467,
"creation_time": 1682373294.4913132,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-399990.pt"
]
},
{
"steps": 599988,
"file_path": "results/Huggy/Huggy/Huggy-599988.onnx",
"reward": 4.316064715385437,
"creation_time": 1682373537.1628542,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-599988.pt"
]
},
{
"steps": 799925,
"file_path": "results/Huggy/Huggy/Huggy-799925.onnx",
"reward": 3.669014322526247,
"creation_time": 1682373773.990389,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-799925.pt"
]
},
{
"steps": 999997,
"file_path": "results/Huggy/Huggy/Huggy-999997.onnx",
"reward": 3.6733190279076067,
"creation_time": 1682374014.4352543,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-999997.pt"
]
},
{
"steps": 1199964,
"file_path": "results/Huggy/Huggy/Huggy-1199964.onnx",
"reward": 3.7877651158865397,
"creation_time": 1682374254.407185,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1199964.pt"
]
},
{
"steps": 1399992,
"file_path": "results/Huggy/Huggy/Huggy-1399992.onnx",
"reward": 3.6001224177224294,
"creation_time": 1682374490.4311073,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1399992.pt"
]
},
{
"steps": 1599979,
"file_path": "results/Huggy/Huggy/Huggy-1599979.onnx",
"reward": 3.7980509386821226,
"creation_time": 1682374722.5932508,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1599979.pt"
]
},
{
"steps": 1799610,
"file_path": "results/Huggy/Huggy/Huggy-1799610.onnx",
"reward": 3.7824178608981045,
"creation_time": 1682374960.9384956,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1799610.pt"
]
},
{
"steps": 1999946,
"file_path": "results/Huggy/Huggy/Huggy-1999946.onnx",
"reward": 3.599144572797029,
"creation_time": 1682375200.6808631,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1999946.pt"
]
},
{
"steps": 2000084,
"file_path": "results/Huggy/Huggy/Huggy-2000084.onnx",
"reward": 3.6454020155237075,
"creation_time": 1682375200.7976756,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-2000084.pt"
]
}
],
"final_checkpoint": {
"steps": 2000084,
"file_path": "results/Huggy/Huggy.onnx",
"reward": 3.6454020155237075,
"creation_time": 1682375200.7976756,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-2000084.pt"
]
}
},
"metadata": {
"stats_format_version": "0.3.0",
"mlagents_version": "0.31.0.dev0",
"torch_version": "1.11.0+cu102"
}
}