{
"Huggy": {
"checkpoints": [
{
"steps": 199824,
"file_path": "results/Huggy2/Huggy/Huggy-199824.onnx",
"reward": 3.6534149646759033,
"creation_time": 1744636046.0394058,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-199824.pt"
]
},
{
"steps": 399859,
"file_path": "results/Huggy2/Huggy/Huggy-399859.onnx",
"reward": 3.4577107247659717,
"creation_time": 1744636290.378374,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-399859.pt"
]
},
{
"steps": 599973,
"file_path": "results/Huggy2/Huggy/Huggy-599973.onnx",
"reward": 2.254373156107389,
"creation_time": 1744636537.505045,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-599973.pt"
]
},
{
"steps": 799986,
"file_path": "results/Huggy2/Huggy/Huggy-799986.onnx",
"reward": 3.7204922539847236,
"creation_time": 1744636781.1925485,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-799986.pt"
]
},
{
"steps": 999957,
"file_path": "results/Huggy2/Huggy/Huggy-999957.onnx",
"reward": 3.7451627254486084,
"creation_time": 1744637028.3305073,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-999957.pt"
]
},
{
"steps": 1199970,
"file_path": "results/Huggy2/Huggy/Huggy-1199970.onnx",
"reward": 4.2122643776237965,
"creation_time": 1744637274.3417852,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1199970.pt"
]
},
{
"steps": 1399998,
"file_path": "results/Huggy2/Huggy/Huggy-1399998.onnx",
"reward": 3.4781370162963867,
"creation_time": 1744637522.6177633,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1399998.pt"
]
},
{
"steps": 1599984,
"file_path": "results/Huggy2/Huggy/Huggy-1599984.onnx",
"reward": 4.155208359808611,
"creation_time": 1744637766.2368217,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1599984.pt"
]
},
{
"steps": 1799985,
"file_path": "results/Huggy2/Huggy/Huggy-1799985.onnx",
"reward": 3.586697288668982,
"creation_time": 1744638015.7479272,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1799985.pt"
]
},
{
"steps": 1999904,
"file_path": "results/Huggy2/Huggy/Huggy-1999904.onnx",
"reward": 3.111040472984314,
"creation_time": 1744638263.553972,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1999904.pt"
]
},
{
"steps": 2000654,
"file_path": "results/Huggy2/Huggy/Huggy-2000654.onnx",
"reward": 2.824093891226727,
"creation_time": 1744638263.7024713,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-2000654.pt"
]
}
],
"final_checkpoint": {
"steps": 2000654,
"file_path": "results/Huggy2/Huggy.onnx",
"reward": 2.824093891226727,
"creation_time": 1744638263.7024713,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-2000654.pt"
]
}
},
"metadata": {
"stats_format_version": "0.3.0",
"mlagents_version": "1.2.0.dev0",
"torch_version": "2.6.0+cu124"
}
}