{
"Huggy": {
"checkpoints": [
{
"steps": 199976,
"file_path": "results/Huggy/Huggy/Huggy-199976.onnx",
"reward": 3.6581461171309155,
"creation_time": 1673424973.6512883,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-199976.pt"
]
},
{
"steps": 399849,
"file_path": "results/Huggy/Huggy/Huggy-399849.onnx",
"reward": 3.8073378586378253,
"creation_time": 1673425190.887244,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-399849.pt"
]
},
{
"steps": 599933,
"file_path": "results/Huggy/Huggy/Huggy-599933.onnx",
"reward": 4.065159829457601,
"creation_time": 1673425414.3497012,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-599933.pt"
]
},
{
"steps": 799870,
"file_path": "results/Huggy/Huggy/Huggy-799870.onnx",
"reward": 3.82021801263453,
"creation_time": 1673425634.944059,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-799870.pt"
]
},
{
"steps": 999991,
"file_path": "results/Huggy/Huggy/Huggy-999991.onnx",
"reward": 3.7966078635999714,
"creation_time": 1673425857.521553,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-999991.pt"
]
},
{
"steps": 1199976,
"file_path": "results/Huggy/Huggy/Huggy-1199976.onnx",
"reward": 3.9562313582586204,
"creation_time": 1673426084.82658,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1199976.pt"
]
},
{
"steps": 1399869,
"file_path": "results/Huggy/Huggy/Huggy-1399869.onnx",
"reward": 3.8207706692062806,
"creation_time": 1673426308.8111215,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1399869.pt"
]
},
{
"steps": 1599988,
"file_path": "results/Huggy/Huggy/Huggy-1599988.onnx",
"reward": 3.7324985314388663,
"creation_time": 1673426533.8810914,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1599988.pt"
]
},
{
"steps": 1799486,
"file_path": "results/Huggy/Huggy/Huggy-1799486.onnx",
"reward": 4.148672680421309,
"creation_time": 1673426757.9705606,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1799486.pt"
]
},
{
"steps": 1999320,
"file_path": "results/Huggy/Huggy/Huggy-1999320.onnx",
"reward": 3.9530170775525937,
"creation_time": 1673426981.8402731,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1999320.pt"
]
},
{
"steps": 2000070,
"file_path": "results/Huggy/Huggy/Huggy-2000070.onnx",
"reward": 3.922373549416963,
"creation_time": 1673426981.9916654,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-2000070.pt"
]
}
],
"final_checkpoint": {
"steps": 2000070,
"file_path": "results/Huggy/Huggy.onnx",
"reward": 3.922373549416963,
"creation_time": 1673426981.9916654,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-2000070.pt"
]
}
},
"metadata": {
"stats_format_version": "0.3.0",
"mlagents_version": "0.29.0.dev0",
"torch_version": "1.8.1+cu102"
}
}