{
"Huggy": {
"checkpoints": [
{
"steps": 199855,
"file_path": "results/Huggy/Huggy/Huggy-199855.onnx",
"reward": 3.355182964923018,
"creation_time": 1761669122.2272208,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-199855.pt"
]
},
{
"steps": 399801,
"file_path": "results/Huggy/Huggy/Huggy-399801.onnx",
"reward": 3.6606795967571317,
"creation_time": 1761669339.1717603,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-399801.pt"
]
},
{
"steps": 599946,
"file_path": "results/Huggy/Huggy/Huggy-599946.onnx",
"reward": 3.6276222747914932,
"creation_time": 1761669563.8778443,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-599946.pt"
]
},
{
"steps": 799603,
"file_path": "results/Huggy/Huggy/Huggy-799603.onnx",
"reward": 3.667520899350951,
"creation_time": 1761669789.035605,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-799603.pt"
]
},
{
"steps": 999962,
"file_path": "results/Huggy/Huggy/Huggy-999962.onnx",
"reward": 3.796411878338047,
"creation_time": 1761670027.93635,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-999962.pt"
]
},
{
"steps": 1199957,
"file_path": "results/Huggy/Huggy/Huggy-1199957.onnx",
"reward": 3.145679422046827,
"creation_time": 1761670262.4665544,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1199957.pt"
]
},
{
"steps": 1399330,
"file_path": "results/Huggy/Huggy/Huggy-1399330.onnx",
"reward": 3.748446740801372,
"creation_time": 1761670494.2934184,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1399330.pt"
]
},
{
"steps": 1599438,
"file_path": "results/Huggy/Huggy/Huggy-1599438.onnx",
"reward": 3.7850629539489744,
"creation_time": 1761670716.5492935,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1599438.pt"
]
},
{
"steps": 1799912,
"file_path": "results/Huggy/Huggy/Huggy-1799912.onnx",
"reward": 3.5148888537378022,
"creation_time": 1761670952.2642245,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1799912.pt"
]
},
{
"steps": 1999946,
"file_path": "results/Huggy/Huggy/Huggy-1999946.onnx",
"reward": null,
"creation_time": 1761671179.398759,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1999946.pt"
]
},
{
"steps": 2000696,
"file_path": "results/Huggy/Huggy/Huggy-2000696.onnx",
"reward": -3.478188991546631,
"creation_time": 1761671179.5222552,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-2000696.pt"
]
}
],
"final_checkpoint": {
"steps": 2000696,
"file_path": "results/Huggy/Huggy.onnx",
"reward": -3.478188991546631,
"creation_time": 1761671179.5222552,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-2000696.pt"
]
}
},
"metadata": {
"stats_format_version": "0.3.0",
"mlagents_version": "1.2.0.dev0",
"torch_version": "2.8.0+cu128"
}
}