{
"Huggy": {
"checkpoints": [
{
"steps": 199920,
"file_path": "results/Huggy2/Huggy/Huggy-199920.onnx",
"reward": 3.646545788874993,
"creation_time": 1737369416.3047247,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-199920.pt"
]
},
{
"steps": 399924,
"file_path": "results/Huggy2/Huggy/Huggy-399924.onnx",
"reward": 3.487220827377204,
"creation_time": 1737369658.5015078,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-399924.pt"
]
},
{
"steps": 599989,
"file_path": "results/Huggy2/Huggy/Huggy-599989.onnx",
"reward": 3.6328848963198452,
"creation_time": 1737369905.6370578,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-599989.pt"
]
},
{
"steps": 799723,
"file_path": "results/Huggy2/Huggy/Huggy-799723.onnx",
"reward": 3.783839183564512,
"creation_time": 1737370145.9444487,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-799723.pt"
]
},
{
"steps": 999582,
"file_path": "results/Huggy2/Huggy/Huggy-999582.onnx",
"reward": 3.6649587911660553,
"creation_time": 1737370397.4394119,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-999582.pt"
]
},
{
"steps": 1199998,
"file_path": "results/Huggy2/Huggy/Huggy-1199998.onnx",
"reward": 3.5242380138853906,
"creation_time": 1737370648.2636316,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1199998.pt"
]
},
{
"steps": 1399712,
"file_path": "results/Huggy2/Huggy/Huggy-1399712.onnx",
"reward": null,
"creation_time": 1737370896.7513216,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1399712.pt"
]
},
{
"steps": 1599997,
"file_path": "results/Huggy2/Huggy/Huggy-1599997.onnx",
"reward": 3.9130138027075247,
"creation_time": 1737371137.1116626,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1599997.pt"
]
},
{
"steps": 1799950,
"file_path": "results/Huggy2/Huggy/Huggy-1799950.onnx",
"reward": 4.119306051403011,
"creation_time": 1737371381.5709167,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1799950.pt"
]
},
{
"steps": 1999954,
"file_path": "results/Huggy2/Huggy/Huggy-1999954.onnx",
"reward": 3.61915788229774,
"creation_time": 1737371623.295726,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1999954.pt"
]
},
{
"steps": 2000026,
"file_path": "results/Huggy2/Huggy/Huggy-2000026.onnx",
"reward": 3.653249420438494,
"creation_time": 1737371623.422798,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-2000026.pt"
]
}
],
"final_checkpoint": {
"steps": 2000026,
"file_path": "results/Huggy2/Huggy.onnx",
"reward": 3.653249420438494,
"creation_time": 1737371623.422798,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-2000026.pt"
]
}
},
"metadata": {
"stats_format_version": "0.3.0",
"mlagents_version": "1.2.0.dev0",
"torch_version": "2.5.1+cu124"
}
}