{
  "Huggy": {
    "checkpoints": [
      {
        "steps": 199789,
        "file_path": "results/Huggy/Huggy/Huggy-199789.onnx",
        "reward": 3.346133302725278,
        "creation_time": 1740738305.377942,
        "auxillary_file_paths": [
          "results/Huggy/Huggy/Huggy-199789.pt"
        ]
      },
      {
        "steps": 399967,
        "file_path": "results/Huggy/Huggy/Huggy-399967.onnx",
        "reward": 3.7245171121929004,
        "creation_time": 1740738545.5874465,
        "auxillary_file_paths": [
          "results/Huggy/Huggy/Huggy-399967.pt"
        ]
      },
      {
        "steps": 599991,
        "file_path": "results/Huggy/Huggy/Huggy-599991.onnx",
        "reward": 3.659709042516248,
        "creation_time": 1740738789.2067592,
        "auxillary_file_paths": [
          "results/Huggy/Huggy/Huggy-599991.pt"
        ]
      },
      {
        "steps": 799983,
        "file_path": "results/Huggy/Huggy/Huggy-799983.onnx",
        "reward": 3.64285932411074,
        "creation_time": 1740739032.2165031,
        "auxillary_file_paths": [
          "results/Huggy/Huggy/Huggy-799983.pt"
        ]
      },
      {
        "steps": 999930,
        "file_path": "results/Huggy/Huggy/Huggy-999930.onnx",
        "reward": 3.860000318632676,
        "creation_time": 1740739281.0889564,
        "auxillary_file_paths": [
          "results/Huggy/Huggy/Huggy-999930.pt"
        ]
      },
      {
        "steps": 1199933,
        "file_path": "results/Huggy/Huggy/Huggy-1199933.onnx",
        "reward": 3.0567437028884887,
        "creation_time": 1740739527.1480768,
        "auxillary_file_paths": [
          "results/Huggy/Huggy/Huggy-1199933.pt"
        ]
      },
      {
        "steps": 1399964,
        "file_path": "results/Huggy/Huggy/Huggy-1399964.onnx",
        "reward": 3.4446669932334655,
        "creation_time": 1740739769.9441144,
        "auxillary_file_paths": [
          "results/Huggy/Huggy/Huggy-1399964.pt"
        ]
      },
      {
        "steps": 1599687,
        "file_path": "results/Huggy/Huggy/Huggy-1599687.onnx",
        "reward": 3.3707434918199266,
        "creation_time": 1740740016.9429066,
        "auxillary_file_paths": [
          "results/Huggy/Huggy/Huggy-1599687.pt"
        ]
      },
      {
        "steps": 1799958,
        "file_path": "results/Huggy/Huggy/Huggy-1799958.onnx",
        "reward": 3.379764807851691,
        "creation_time": 1740740259.2964208,
        "auxillary_file_paths": [
          "results/Huggy/Huggy/Huggy-1799958.pt"
        ]
      },
      {
        "steps": 1999688,
        "file_path": "results/Huggy/Huggy/Huggy-1999688.onnx",
        "reward": 4.052652582526207,
        "creation_time": 1740740499.2723444,
        "auxillary_file_paths": [
          "results/Huggy/Huggy/Huggy-1999688.pt"
        ]
      },
      {
        "steps": 2000438,
        "file_path": "results/Huggy/Huggy/Huggy-2000438.onnx",
        "reward": 3.4518473010796766,
        "creation_time": 1740740499.4176068,
        "auxillary_file_paths": [
          "results/Huggy/Huggy/Huggy-2000438.pt"
        ]
      }
    ],
    "final_checkpoint": {
      "steps": 2000438,
      "file_path": "results/Huggy/Huggy.onnx",
      "reward": 3.4518473010796766,
      "creation_time": 1740740499.4176068,
      "auxillary_file_paths": [
        "results/Huggy/Huggy/Huggy-2000438.pt"
      ]
    }
  },
  "metadata": {
    "stats_format_version": "0.3.0",
    "mlagents_version": "1.2.0.dev0",
    "torch_version": "2.6.0+cu124"
  }
}