{
"Huggy": {
"checkpoints": [
{
"steps": 199786,
"file_path": "results/Huggy2/Huggy/Huggy-199786.onnx",
"reward": 3.4705598851044974,
"creation_time": 1748785987.6081398,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-199786.pt"
]
},
{
"steps": 399863,
"file_path": "results/Huggy2/Huggy/Huggy-399863.onnx",
"reward": 3.483874964086633,
"creation_time": 1748786231.3887224,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-399863.pt"
]
},
{
"steps": 599668,
"file_path": "results/Huggy2/Huggy/Huggy-599668.onnx",
"reward": 4.234572378071872,
"creation_time": 1748786482.2030382,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-599668.pt"
]
},
{
"steps": 799968,
"file_path": "results/Huggy2/Huggy/Huggy-799968.onnx",
"reward": 3.9461928268068847,
"creation_time": 1748786729.230119,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-799968.pt"
]
},
{
"steps": 999946,
"file_path": "results/Huggy2/Huggy/Huggy-999946.onnx",
"reward": 4.202582200050354,
"creation_time": 1748786981.0958953,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-999946.pt"
]
},
{
"steps": 1199914,
"file_path": "results/Huggy2/Huggy/Huggy-1199914.onnx",
"reward": 3.871156841148565,
"creation_time": 1748787233.209047,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1199914.pt"
]
},
{
"steps": 1399974,
"file_path": "results/Huggy2/Huggy/Huggy-1399974.onnx",
"reward": 4.419462621212006,
"creation_time": 1748787484.356331,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1399974.pt"
]
},
{
"steps": 1599998,
"file_path": "results/Huggy2/Huggy/Huggy-1599998.onnx",
"reward": 4.005977100197305,
"creation_time": 1748787732.8846924,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1599998.pt"
]
},
{
"steps": 1799936,
"file_path": "results/Huggy2/Huggy/Huggy-1799936.onnx",
"reward": 4.028973813464002,
"creation_time": 1748787985.380002,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1799936.pt"
]
},
{
"steps": 1999990,
"file_path": "results/Huggy2/Huggy/Huggy-1999990.onnx",
"reward": 2.757160790761312,
"creation_time": 1748788233.8268445,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1999990.pt"
]
},
{
"steps": 2000028,
"file_path": "results/Huggy2/Huggy/Huggy-2000028.onnx",
"reward": 2.6710515841841698,
"creation_time": 1748788233.9514558,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-2000028.pt"
]
}
],
"final_checkpoint": {
"steps": 2000028,
"file_path": "results/Huggy2/Huggy.onnx",
"reward": 2.6710515841841698,
"creation_time": 1748788233.9514558,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-2000028.pt"
]
}
},
"metadata": {
"stats_format_version": "0.3.0",
"mlagents_version": "1.2.0.dev0",
"torch_version": "2.7.0+cu126"
}
}