{
  "Huggy": {
    "checkpoints": [
      {
        "steps": 199825,
        "file_path": "results/Huggy/Huggy/Huggy-199825.onnx",
        "reward": 3.247880337635676,
        "creation_time": 1695475257.0636292,
        "auxillary_file_paths": [
          "results/Huggy/Huggy/Huggy-199825.pt"
        ]
      },
      {
        "steps": 399849,
        "file_path": "results/Huggy/Huggy/Huggy-399849.onnx",
        "reward": 3.638603004813194,
        "creation_time": 1695475502.8089457,
        "auxillary_file_paths": [
          "results/Huggy/Huggy/Huggy-399849.pt"
        ]
      },
      {
        "steps": 599929,
        "file_path": "results/Huggy/Huggy/Huggy-599929.onnx",
        "reward": 3.3136829137802124,
        "creation_time": 1695475755.358849,
        "auxillary_file_paths": [
          "results/Huggy/Huggy/Huggy-599929.pt"
        ]
      },
      {
        "steps": 799969,
        "file_path": "results/Huggy/Huggy/Huggy-799969.onnx",
        "reward": 3.745181730872876,
        "creation_time": 1695476005.4603758,
        "auxillary_file_paths": [
          "results/Huggy/Huggy/Huggy-799969.pt"
        ]
      },
      {
        "steps": 999925,
        "file_path": "results/Huggy/Huggy/Huggy-999925.onnx",
        "reward": 3.9052358713555844,
        "creation_time": 1695476259.53737,
        "auxillary_file_paths": [
          "results/Huggy/Huggy/Huggy-999925.pt"
        ]
      },
      {
        "steps": 1199880,
        "file_path": "results/Huggy/Huggy/Huggy-1199880.onnx",
        "reward": 3.8056162034764007,
        "creation_time": 1695476508.326182,
        "auxillary_file_paths": [
          "results/Huggy/Huggy/Huggy-1199880.pt"
        ]
      },
      {
        "steps": 1399987,
        "file_path": "results/Huggy/Huggy/Huggy-1399987.onnx",
        "reward": 3.7820787008651005,
        "creation_time": 1695476757.957966,
        "auxillary_file_paths": [
          "results/Huggy/Huggy/Huggy-1399987.pt"
        ]
      },
      {
        "steps": 1599906,
        "file_path": "results/Huggy/Huggy/Huggy-1599906.onnx",
        "reward": 3.504992272172655,
        "creation_time": 1695477009.3640673,
        "auxillary_file_paths": [
          "results/Huggy/Huggy/Huggy-1599906.pt"
        ]
      },
      {
        "steps": 1799380,
        "file_path": "results/Huggy/Huggy/Huggy-1799380.onnx",
        "reward": 3.9074878121415773,
        "creation_time": 1695477256.9355547,
        "auxillary_file_paths": [
          "results/Huggy/Huggy/Huggy-1799380.pt"
        ]
      },
      {
        "steps": 1999981,
        "file_path": "results/Huggy/Huggy/Huggy-1999981.onnx",
        "reward": 3.687518569011262,
        "creation_time": 1695477500.9378421,
        "auxillary_file_paths": [
          "results/Huggy/Huggy/Huggy-1999981.pt"
        ]
      },
      {
        "steps": 2000040,
        "file_path": "results/Huggy/Huggy/Huggy-2000040.onnx",
        "reward": 3.6867438942193984,
        "creation_time": 1695477501.1314156,
        "auxillary_file_paths": [
          "results/Huggy/Huggy/Huggy-2000040.pt"
        ]
      }
    ],
    "final_checkpoint": {
      "steps": 2000040,
      "file_path": "results/Huggy/Huggy.onnx",
      "reward": 3.6867438942193984,
      "creation_time": 1695477501.1314156,
      "auxillary_file_paths": [
        "results/Huggy/Huggy/Huggy-2000040.pt"
      ]
    }
  },
  "metadata": {
    "stats_format_version": "0.3.0",
    "mlagents_version": "0.31.0.dev0",
    "torch_version": "1.11.0+cu102"
  }
}