{
"Huggy": {
"checkpoints": [
{
"steps": 199995,
"file_path": "results/Huggy2/Huggy/Huggy-199995.onnx",
"reward": 3.292427447361824,
"creation_time": 1739277916.2332118,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-199995.pt"
]
},
{
"steps": 399863,
"file_path": "results/Huggy2/Huggy/Huggy-399863.onnx",
"reward": 3.609001280769469,
"creation_time": 1739278147.5203578,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-399863.pt"
]
},
{
"steps": 599879,
"file_path": "results/Huggy2/Huggy/Huggy-599879.onnx",
"reward": 4.1518625020980835,
"creation_time": 1739278380.4396708,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-599879.pt"
]
},
{
"steps": 799914,
"file_path": "results/Huggy2/Huggy/Huggy-799914.onnx",
"reward": 3.8984494004156685,
"creation_time": 1739278612.425417,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-799914.pt"
]
},
{
"steps": 999958,
"file_path": "results/Huggy2/Huggy/Huggy-999958.onnx",
"reward": 3.6717768395724506,
"creation_time": 1739278850.8397665,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-999958.pt"
]
},
{
"steps": 1199855,
"file_path": "results/Huggy2/Huggy/Huggy-1199855.onnx",
"reward": 3.256841815036276,
"creation_time": 1739279090.075736,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1199855.pt"
]
},
{
"steps": 1399536,
"file_path": "results/Huggy2/Huggy/Huggy-1399536.onnx",
"reward": 3.6792092026748096,
"creation_time": 1739279325.1892962,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1399536.pt"
]
},
{
"steps": 1599531,
"file_path": "results/Huggy2/Huggy/Huggy-1599531.onnx",
"reward": 3.7569404986354855,
"creation_time": 1739279563.450996,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1599531.pt"
]
},
{
"steps": 1799900,
"file_path": "results/Huggy2/Huggy/Huggy-1799900.onnx",
"reward": 3.8631165474653244,
"creation_time": 1739279799.5054479,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1799900.pt"
]
},
{
"steps": 1999506,
"file_path": "results/Huggy2/Huggy/Huggy-1999506.onnx",
"reward": 3.5764286626469004,
"creation_time": 1739280035.201748,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1999506.pt"
]
},
{
"steps": 2000256,
"file_path": "results/Huggy2/Huggy/Huggy-2000256.onnx",
"reward": 3.047038664420446,
"creation_time": 1739280035.354022,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-2000256.pt"
]
}
],
"final_checkpoint": {
"steps": 2000256,
"file_path": "results/Huggy2/Huggy.onnx",
"reward": 3.047038664420446,
"creation_time": 1739280035.354022,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-2000256.pt"
]
}
},
"metadata": {
"stats_format_version": "0.3.0",
"mlagents_version": "1.2.0.dev0",
"torch_version": "2.6.0+cu124"
}
}