{
"Huggy": {
"checkpoints": [
{
"steps": 199939,
"file_path": "results/Huggy2/Huggy/Huggy-199939.onnx",
"reward": 3.737848524389596,
"creation_time": 1759076065.6087654,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-199939.pt"
]
},
{
"steps": 399959,
"file_path": "results/Huggy2/Huggy/Huggy-399959.onnx",
"reward": 3.410525559432923,
"creation_time": 1759076346.3627067,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-399959.pt"
]
},
{
"steps": 599971,
"file_path": "results/Huggy2/Huggy/Huggy-599971.onnx",
"reward": 4.404620663324992,
"creation_time": 1759076630.697513,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-599971.pt"
]
},
{
"steps": 799934,
"file_path": "results/Huggy2/Huggy/Huggy-799934.onnx",
"reward": 3.6131303172845106,
"creation_time": 1759076913.2768276,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-799934.pt"
]
},
{
"steps": 999949,
"file_path": "results/Huggy2/Huggy/Huggy-999949.onnx",
"reward": 3.694583613297035,
"creation_time": 1759077198.490636,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-999949.pt"
]
},
{
"steps": 1199968,
"file_path": "results/Huggy2/Huggy/Huggy-1199968.onnx",
"reward": 3.9382626981962296,
"creation_time": 1759077484.9965944,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1199968.pt"
]
},
{
"steps": 1399625,
"file_path": "results/Huggy2/Huggy/Huggy-1399625.onnx",
"reward": 3.6781206372621895,
"creation_time": 1759077770.2689786,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1399625.pt"
]
},
{
"steps": 1599914,
"file_path": "results/Huggy2/Huggy/Huggy-1599914.onnx",
"reward": 3.901224872109237,
"creation_time": 1759078058.6735184,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1599914.pt"
]
},
{
"steps": 1799959,
"file_path": "results/Huggy2/Huggy/Huggy-1799959.onnx",
"reward": 3.784127123551826,
"creation_time": 1759078346.2209918,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1799959.pt"
]
},
{
"steps": 1999858,
"file_path": "results/Huggy2/Huggy/Huggy-1999858.onnx",
"reward": 3.5886047077178955,
"creation_time": 1759078636.3527405,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1999858.pt"
]
},
{
"steps": 2000608,
"file_path": "results/Huggy2/Huggy/Huggy-2000608.onnx",
"reward": 3.4214256772628198,
"creation_time": 1759078636.488945,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-2000608.pt"
]
}
],
"final_checkpoint": {
"steps": 2000608,
"file_path": "results/Huggy2/Huggy.onnx",
"reward": 3.4214256772628198,
"creation_time": 1759078636.488945,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-2000608.pt"
]
}
},
"metadata": {
"stats_format_version": "0.3.0",
"mlagents_version": "1.2.0.dev0",
"torch_version": "2.8.0+cu128"
}
}