{
"Huggy": {
"checkpoints": [
{
"steps": 199942,
"file_path": "results/Huggy/Huggy/Huggy-199942.onnx",
"reward": 3.5781798771449496,
"creation_time": 1682351206.0641134,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-199942.pt"
]
},
{
"steps": 399989,
"file_path": "results/Huggy/Huggy/Huggy-399989.onnx",
"reward": 3.8326160353674017,
"creation_time": 1682351444.2832155,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-399989.pt"
]
},
{
"steps": 599997,
"file_path": "results/Huggy/Huggy/Huggy-599997.onnx",
"reward": 3.478140283908163,
"creation_time": 1682351689.9209492,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-599997.pt"
]
},
{
"steps": 799915,
"file_path": "results/Huggy/Huggy/Huggy-799915.onnx",
"reward": 3.884981166232716,
"creation_time": 1682351941.4106698,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-799915.pt"
]
},
{
"steps": 999977,
"file_path": "results/Huggy/Huggy/Huggy-999977.onnx",
"reward": 4.012836474221903,
"creation_time": 1682352193.630138,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-999977.pt"
]
},
{
"steps": 1199952,
"file_path": "results/Huggy/Huggy/Huggy-1199952.onnx",
"reward": 4.040491801936452,
"creation_time": 1682352441.6714525,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1199952.pt"
]
},
{
"steps": 1399951,
"file_path": "results/Huggy/Huggy/Huggy-1399951.onnx",
"reward": 5.68428955078125,
"creation_time": 1682352689.435326,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1399951.pt"
]
},
{
"steps": 1599976,
"file_path": "results/Huggy/Huggy/Huggy-1599976.onnx",
"reward": 3.940449260839141,
"creation_time": 1682352932.247581,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1599976.pt"
]
},
{
"steps": 1799948,
"file_path": "results/Huggy/Huggy/Huggy-1799948.onnx",
"reward": 4.072493580856708,
"creation_time": 1682353193.1316106,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1799948.pt"
]
},
{
"steps": 1999952,
"file_path": "results/Huggy/Huggy/Huggy-1999952.onnx",
"reward": 3.551220971184808,
"creation_time": 1682353456.3187916,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1999952.pt"
]
},
{
"steps": 2000033,
"file_path": "results/Huggy/Huggy/Huggy-2000033.onnx",
"reward": 3.6050026165811637,
"creation_time": 1682353456.5305235,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-2000033.pt"
]
}
],
"final_checkpoint": {
"steps": 2000033,
"file_path": "results/Huggy/Huggy.onnx",
"reward": 3.6050026165811637,
"creation_time": 1682353456.5305235,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-2000033.pt"
]
}
},
"metadata": {
"stats_format_version": "0.3.0",
"mlagents_version": "0.31.0.dev0",
"torch_version": "1.11.0+cu102"
}
}