{
"Huggy": {
"checkpoints": [
{
"steps": 199776,
"file_path": "results/Huggy/Huggy/Huggy-199776.onnx",
"reward": 3.3432542367414997,
"creation_time": 1725304523.138101,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-199776.pt"
]
},
{
"steps": 399881,
"file_path": "results/Huggy/Huggy/Huggy-399881.onnx",
"reward": 3.6677253069701017,
"creation_time": 1725304768.7437367,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-399881.pt"
]
},
{
"steps": 599973,
"file_path": "results/Huggy/Huggy/Huggy-599973.onnx",
"reward": 4.325945615768433,
"creation_time": 1725305015.1372535,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-599973.pt"
]
},
{
"steps": 799982,
"file_path": "results/Huggy/Huggy/Huggy-799982.onnx",
"reward": 3.849228621226305,
"creation_time": 1725305258.5884173,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-799982.pt"
]
},
{
"steps": 999984,
"file_path": "results/Huggy/Huggy/Huggy-999984.onnx",
"reward": 3.6875375413712654,
"creation_time": 1725305506.1295207,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-999984.pt"
]
},
{
"steps": 1199875,
"file_path": "results/Huggy/Huggy/Huggy-1199875.onnx",
"reward": 3.608799163500468,
"creation_time": 1725305753.1198046,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1199875.pt"
]
},
{
"steps": 1399956,
"file_path": "results/Huggy/Huggy/Huggy-1399956.onnx",
"reward": 3.9795129831348146,
"creation_time": 1725305998.5541897,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1399956.pt"
]
},
{
"steps": 1599922,
"file_path": "results/Huggy/Huggy/Huggy-1599922.onnx",
"reward": 4.098622315969223,
"creation_time": 1725306247.7506003,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1599922.pt"
]
},
{
"steps": 1799947,
"file_path": "results/Huggy/Huggy/Huggy-1799947.onnx",
"reward": 3.996229685842991,
"creation_time": 1725306497.8022897,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1799947.pt"
]
},
{
"steps": 1999975,
"file_path": "results/Huggy/Huggy/Huggy-1999975.onnx",
"reward": 4.888420173755059,
"creation_time": 1725306746.553651,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1999975.pt"
]
},
{
"steps": 2000055,
"file_path": "results/Huggy/Huggy/Huggy-2000055.onnx",
"reward": 4.8859928228237015,
"creation_time": 1725306746.674634,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-2000055.pt"
]
}
],
"final_checkpoint": {
"steps": 2000055,
"file_path": "results/Huggy/Huggy.onnx",
"reward": 4.8859928228237015,
"creation_time": 1725306746.674634,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-2000055.pt"
]
}
},
"metadata": {
"stats_format_version": "0.3.0",
"mlagents_version": "1.1.0.dev0",
"torch_version": "2.4.0+cu121"
}
}