{
"metadata": {
"stats_format_version": "0.3.0",
"mlagents_version": "1.1.0",
"torch_version": "2.9.1+cu128"
},
"Huggy": {
"checkpoints": [
{
"steps": 199972,
"file_path": "results/Huggy/Huggy/Huggy-199972.onnx",
"reward": 0.8976404666900635,
"creation_time": 1764955593.885466,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-199972.pt"
]
},
{
"steps": 399986,
"file_path": "results/Huggy/Huggy/Huggy-399986.onnx",
"reward": 3.6281656418527874,
"creation_time": 1764956004.554875,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-399986.pt"
]
},
{
"steps": 599946,
"file_path": "results/Huggy/Huggy/Huggy-599946.onnx",
"reward": 3.7598275677363078,
"creation_time": 1764956416.3392446,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-599946.pt"
]
},
{
"steps": 799975,
"file_path": "results/Huggy/Huggy/Huggy-799975.onnx",
"reward": 4.38282136619091,
"creation_time": 1764956824.9456372,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-799975.pt"
]
},
{
"steps": 999960,
"file_path": "results/Huggy/Huggy/Huggy-999960.onnx",
"reward": 3.89854073463027,
"creation_time": 1764957216.2460628,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-999960.pt"
]
},
{
"steps": 1199915,
"file_path": "results/Huggy/Huggy/Huggy-1199915.onnx",
"reward": 3.9129178597002614,
"creation_time": 1764957594.138075,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1199915.pt"
]
},
{
"steps": 1399982,
"file_path": "results/Huggy/Huggy/Huggy-1399982.onnx",
"reward": 3.5639790641256126,
"creation_time": 1764957984.8731086,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1399982.pt"
]
},
{
"steps": 1599723,
"file_path": "results/Huggy/Huggy/Huggy-1599723.onnx",
"reward": 3.7384797864490085,
"creation_time": 1764958377.199024,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1599723.pt"
]
},
{
"steps": 1799956,
"file_path": "results/Huggy/Huggy/Huggy-1799956.onnx",
"reward": 3.7690038522084555,
"creation_time": 1764958742.0252411,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1799956.pt"
]
},
{
"steps": 1999967,
"file_path": "results/Huggy/Huggy/Huggy-1999967.onnx",
"reward": 3.7254941390908285,
"creation_time": 1764959129.6730123,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1999967.pt"
]
},
{
"steps": 2000073,
"file_path": "results/Huggy/Huggy/Huggy-2000073.onnx",
"reward": 3.7467225327573974,
"creation_time": 1764959129.7382472,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-2000073.pt"
]
}
],
"final_checkpoint": {
"steps": 2000073,
"file_path": "results/Huggy/Huggy.onnx",
"reward": 3.7467225327573974,
"creation_time": 1764959129.7382472,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-2000073.pt"
]
}
}
}