{
"Huggy": {
"checkpoints": [
{
"steps": 199870,
"file_path": "results/Huggy2/Huggy/Huggy-199870.onnx",
"reward": 3.516437302162121,
"creation_time": 1735548842.2755685,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-199870.pt"
]
},
{
"steps": 399760,
"file_path": "results/Huggy2/Huggy/Huggy-399760.onnx",
"reward": 3.6753446451004814,
"creation_time": 1735549291.9731336,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-399760.pt"
]
},
{
"steps": 599986,
"file_path": "results/Huggy2/Huggy/Huggy-599986.onnx",
"reward": 3.6519421021143597,
"creation_time": 1735549741.5876436,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-599986.pt"
]
},
{
"steps": 799993,
"file_path": "results/Huggy2/Huggy/Huggy-799993.onnx",
"reward": 3.952308575563793,
"creation_time": 1735550177.0220647,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-799993.pt"
]
},
{
"steps": 999935,
"file_path": "results/Huggy2/Huggy/Huggy-999935.onnx",
"reward": 3.515025932164419,
"creation_time": 1735550624.625902,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-999935.pt"
]
},
{
"steps": 1199517,
"file_path": "results/Huggy2/Huggy/Huggy-1199517.onnx",
"reward": 3.908572461546921,
"creation_time": 1735551071.8376508,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1199517.pt"
]
},
{
"steps": 1399984,
"file_path": "results/Huggy2/Huggy/Huggy-1399984.onnx",
"reward": 3.881961586031803,
"creation_time": 1735551503.9696105,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1399984.pt"
]
},
{
"steps": 1599987,
"file_path": "results/Huggy2/Huggy/Huggy-1599987.onnx",
"reward": 3.7628114877583143,
"creation_time": 1735551949.7018065,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1599987.pt"
]
},
{
"steps": 1799343,
"file_path": "results/Huggy2/Huggy/Huggy-1799343.onnx",
"reward": 3.815121966859569,
"creation_time": 1735552394.732301,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1799343.pt"
]
},
{
"steps": 1999872,
"file_path": "results/Huggy2/Huggy/Huggy-1999872.onnx",
"reward": 4.252090278424714,
"creation_time": 1735552842.869781,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1999872.pt"
]
},
{
"steps": 2000010,
"file_path": "results/Huggy2/Huggy/Huggy-2000010.onnx",
"reward": 4.3158485174179075,
"creation_time": 1735552842.990276,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-2000010.pt"
]
}
],
"final_checkpoint": {
"steps": 2000010,
"file_path": "results/Huggy2/Huggy.onnx",
"reward": 4.3158485174179075,
"creation_time": 1735552842.990276,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-2000010.pt"
]
}
},
"metadata": {
"stats_format_version": "0.3.0",
"mlagents_version": "1.2.0.dev0",
"torch_version": "2.5.1+cu121"
}
}