{
"Huggy": {
"checkpoints": [
{
"steps": 199930,
"file_path": "results/Huggy/Huggy/Huggy-199930.onnx",
"reward": 3.600029250553676,
"creation_time": 1671312803.5242596,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-199930.pt"
]
},
{
"steps": 399947,
"file_path": "results/Huggy/Huggy/Huggy-399947.onnx",
"reward": 3.918799001829965,
"creation_time": 1671313023.2941663,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-399947.pt"
]
},
{
"steps": 599820,
"file_path": "results/Huggy/Huggy/Huggy-599820.onnx",
"reward": 2.809450462460518,
"creation_time": 1671313244.2063737,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-599820.pt"
]
},
{
"steps": 799981,
"file_path": "results/Huggy/Huggy/Huggy-799981.onnx",
"reward": 3.797160910913743,
"creation_time": 1671313462.7737217,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-799981.pt"
]
},
{
"steps": 999946,
"file_path": "results/Huggy/Huggy/Huggy-999946.onnx",
"reward": 3.7443147163305963,
"creation_time": 1671313682.4426773,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-999946.pt"
]
},
{
"steps": 1199865,
"file_path": "results/Huggy/Huggy/Huggy-1199865.onnx",
"reward": 3.8834728188812733,
"creation_time": 1671313904.2485282,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1199865.pt"
]
},
{
"steps": 1399989,
"file_path": "results/Huggy/Huggy/Huggy-1399989.onnx",
"reward": 3.8033874429596795,
"creation_time": 1671314122.2058203,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1399989.pt"
]
},
{
"steps": 1599990,
"file_path": "results/Huggy/Huggy/Huggy-1599990.onnx",
"reward": 3.8462934083752818,
"creation_time": 1671314343.7456837,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1599990.pt"
]
},
{
"steps": 1799854,
"file_path": "results/Huggy/Huggy/Huggy-1799854.onnx",
"reward": 3.8706783935427667,
"creation_time": 1671314562.8771253,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1799854.pt"
]
},
{
"steps": 1999877,
"file_path": "results/Huggy/Huggy/Huggy-1999877.onnx",
"reward": 3.1429054074817233,
"creation_time": 1671314784.2173448,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1999877.pt"
]
},
{
"steps": 2000627,
"file_path": "results/Huggy/Huggy/Huggy-2000627.onnx",
"reward": 2.4342500805854796,
"creation_time": 1671314784.3666086,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-2000627.pt"
]
}
],
"final_checkpoint": {
"steps": 2000627,
"file_path": "results/Huggy/Huggy.onnx",
"reward": 2.4342500805854796,
"creation_time": 1671314784.3666086,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-2000627.pt"
]
}
},
"metadata": {
"stats_format_version": "0.3.0",
"mlagents_version": "0.29.0.dev0",
"torch_version": "1.8.1+cu102"
}
}