{
"Huggy": {
"checkpoints": [
{
"steps": 199980,
"file_path": "results/Huggy2/Huggy/Huggy-199980.onnx",
"reward": 3.2395579748683505,
"creation_time": 1734096571.4136264,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-199980.pt"
]
},
{
"steps": 399881,
"file_path": "results/Huggy2/Huggy/Huggy-399881.onnx",
"reward": 3.561417119843619,
"creation_time": 1734096820.235354,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-399881.pt"
]
},
{
"steps": 599951,
"file_path": "results/Huggy2/Huggy/Huggy-599951.onnx",
"reward": 4.229275248267434,
"creation_time": 1734097073.2409444,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-599951.pt"
]
},
{
"steps": 799939,
"file_path": "results/Huggy2/Huggy/Huggy-799939.onnx",
"reward": 3.6861609192619547,
"creation_time": 1734097324.2320218,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-799939.pt"
]
},
{
"steps": 999961,
"file_path": "results/Huggy2/Huggy/Huggy-999961.onnx",
"reward": 3.3809856099111064,
"creation_time": 1734097577.3832653,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-999961.pt"
]
},
{
"steps": 1199740,
"file_path": "results/Huggy2/Huggy/Huggy-1199740.onnx",
"reward": 3.281011984154985,
"creation_time": 1734097833.769613,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1199740.pt"
]
},
{
"steps": 1399298,
"file_path": "results/Huggy2/Huggy/Huggy-1399298.onnx",
"reward": 3.3041130574910262,
"creation_time": 1734098086.8607364,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1399298.pt"
]
},
{
"steps": 1599946,
"file_path": "results/Huggy2/Huggy/Huggy-1599946.onnx",
"reward": 3.9628325418098687,
"creation_time": 1734098343.7704945,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1599946.pt"
]
},
{
"steps": 1799432,
"file_path": "results/Huggy2/Huggy/Huggy-1799432.onnx",
"reward": 3.803221929818392,
"creation_time": 1734098603.199647,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1799432.pt"
]
},
{
"steps": 1999952,
"file_path": "results/Huggy2/Huggy/Huggy-1999952.onnx",
"reward": 3.516981491289641,
"creation_time": 1734098854.961238,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1999952.pt"
]
},
{
"steps": 2000009,
"file_path": "results/Huggy2/Huggy/Huggy-2000009.onnx",
"reward": 3.511770695291889,
"creation_time": 1734098855.139466,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-2000009.pt"
]
}
],
"final_checkpoint": {
"steps": 2000009,
"file_path": "results/Huggy2/Huggy.onnx",
"reward": 3.511770695291889,
"creation_time": 1734098855.139466,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-2000009.pt"
]
}
},
"metadata": {
"stats_format_version": "0.3.0",
"mlagents_version": "1.2.0.dev0",
"torch_version": "2.5.1+cu121"
}
}