{
"Huggy": {
"checkpoints": [
{
"steps": 199995,
"file_path": "results/Huggy2/Huggy/Huggy-199995.onnx",
"reward": 3.3778995404372343,
"creation_time": 1749319036.2088497,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-199995.pt"
]
},
{
"steps": 399984,
"file_path": "results/Huggy2/Huggy/Huggy-399984.onnx",
"reward": 3.5889526388861914,
"creation_time": 1749319278.1638925,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-399984.pt"
]
},
{
"steps": 599920,
"file_path": "results/Huggy2/Huggy/Huggy-599920.onnx",
"reward": 4.075699864011822,
"creation_time": 1749319524.2631335,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-599920.pt"
]
},
{
"steps": 799979,
"file_path": "results/Huggy2/Huggy/Huggy-799979.onnx",
"reward": 4.016064877145028,
"creation_time": 1749319762.6291196,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-799979.pt"
]
},
{
"steps": 999821,
"file_path": "results/Huggy2/Huggy/Huggy-999821.onnx",
"reward": 3.885470464917048,
"creation_time": 1749320011.6702259,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-999821.pt"
]
},
{
"steps": 1199983,
"file_path": "results/Huggy2/Huggy/Huggy-1199983.onnx",
"reward": 3.728831730636896,
"creation_time": 1749320259.5649838,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1199983.pt"
]
},
{
"steps": 1399998,
"file_path": "results/Huggy2/Huggy/Huggy-1399998.onnx",
"reward": 4.328170603321444,
"creation_time": 1749320507.5547638,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1399998.pt"
]
},
{
"steps": 1599971,
"file_path": "results/Huggy2/Huggy/Huggy-1599971.onnx",
"reward": 4.128233942428874,
"creation_time": 1749320753.2269802,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1599971.pt"
]
},
{
"steps": 1799946,
"file_path": "results/Huggy2/Huggy/Huggy-1799946.onnx",
"reward": 3.7695757652201305,
"creation_time": 1749321000.4350553,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1799946.pt"
]
},
{
"steps": 1999981,
"file_path": "results/Huggy2/Huggy/Huggy-1999981.onnx",
"reward": 3.8472756914708808,
"creation_time": 1749321253.2485726,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1999981.pt"
]
},
{
"steps": 2000020,
"file_path": "results/Huggy2/Huggy/Huggy-2000020.onnx",
"reward": 3.8186177651990545,
"creation_time": 1749321253.4124262,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-2000020.pt"
]
}
],
"final_checkpoint": {
"steps": 2000020,
"file_path": "results/Huggy2/Huggy.onnx",
"reward": 3.8186177651990545,
"creation_time": 1749321253.4124262,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-2000020.pt"
]
}
},
"metadata": {
"stats_format_version": "0.3.0",
"mlagents_version": "1.2.0.dev0",
"torch_version": "2.7.1+cu126"
}
}