{
"Huggy": {
"checkpoints": [
{
"steps": 199403,
"file_path": "results/Huggy2/Huggy/Huggy-199403.onnx",
"reward": 3.3317833659904346,
"creation_time": 1767339201.2374942,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-199403.pt"
]
},
{
"steps": 399961,
"file_path": "results/Huggy2/Huggy/Huggy-399961.onnx",
"reward": 3.8494550276685646,
"creation_time": 1767339468.5240288,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-399961.pt"
]
},
{
"steps": 599955,
"file_path": "results/Huggy2/Huggy/Huggy-599955.onnx",
"reward": 4.447453498840332,
"creation_time": 1767339739.5633628,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-599955.pt"
]
},
{
"steps": 799832,
"file_path": "results/Huggy2/Huggy/Huggy-799832.onnx",
"reward": 3.7374684010576082,
"creation_time": 1767340006.007708,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-799832.pt"
]
},
{
"steps": 999955,
"file_path": "results/Huggy2/Huggy/Huggy-999955.onnx",
"reward": 3.733880088640296,
"creation_time": 1767340279.6659486,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-999955.pt"
]
},
{
"steps": 1199964,
"file_path": "results/Huggy2/Huggy/Huggy-1199964.onnx",
"reward": 3.8276602725187936,
"creation_time": 1767340554.5624578,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1199964.pt"
]
},
{
"steps": 1399853,
"file_path": "results/Huggy2/Huggy/Huggy-1399853.onnx",
"reward": 4.0360705852508545,
"creation_time": 1767340827.9153144,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1399853.pt"
]
},
{
"steps": 1599939,
"file_path": "results/Huggy2/Huggy/Huggy-1599939.onnx",
"reward": 3.856640597815236,
"creation_time": 1767341099.0218406,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1599939.pt"
]
},
{
"steps": 1799893,
"file_path": "results/Huggy2/Huggy/Huggy-1799893.onnx",
"reward": 3.731956547889553,
"creation_time": 1767341373.3330133,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1799893.pt"
]
},
{
"steps": 1999981,
"file_path": "results/Huggy2/Huggy/Huggy-1999981.onnx",
"reward": 4.343947758277257,
"creation_time": 1767341644.0819192,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1999981.pt"
]
},
{
"steps": 2000030,
"file_path": "results/Huggy2/Huggy/Huggy-2000030.onnx",
"reward": 4.297128074023188,
"creation_time": 1767341644.1936085,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-2000030.pt"
]
}
],
"final_checkpoint": {
"steps": 2000030,
"file_path": "results/Huggy2/Huggy.onnx",
"reward": 4.297128074023188,
"creation_time": 1767341644.1936085,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-2000030.pt"
]
}
},
"metadata": {
"stats_format_version": "0.3.0",
"mlagents_version": "1.2.0.dev0",
"torch_version": "2.8.0+cu128"
}
}