{
"Huggy": {
"checkpoints": [
{
"steps": 199921,
"file_path": "results/Huggy2/Huggy/Huggy-199921.onnx",
"reward": 3.007858137289683,
"creation_time": 1747725488.373134,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-199921.pt"
]
},
{
"steps": 399981,
"file_path": "results/Huggy2/Huggy/Huggy-399981.onnx",
"reward": 3.662164121866226,
"creation_time": 1747725731.1349137,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-399981.pt"
]
},
{
"steps": 599954,
"file_path": "results/Huggy2/Huggy/Huggy-599954.onnx",
"reward": 2.9212472875912985,
"creation_time": 1747725978.8523822,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-599954.pt"
]
},
{
"steps": 799350,
"file_path": "results/Huggy2/Huggy/Huggy-799350.onnx",
"reward": 3.887133934570633,
"creation_time": 1747726228.0243742,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-799350.pt"
]
},
{
"steps": 999993,
"file_path": "results/Huggy2/Huggy/Huggy-999993.onnx",
"reward": 3.7436341952000345,
"creation_time": 1747726483.4258761,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-999993.pt"
]
},
{
"steps": 1199974,
"file_path": "results/Huggy2/Huggy/Huggy-1199974.onnx",
"reward": 3.9072196991717227,
"creation_time": 1747726735.4889228,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1199974.pt"
]
},
{
"steps": 1399998,
"file_path": "results/Huggy2/Huggy/Huggy-1399998.onnx",
"reward": 3.9152413013246323,
"creation_time": 1747726988.721044,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1399998.pt"
]
},
{
"steps": 1599971,
"file_path": "results/Huggy2/Huggy/Huggy-1599971.onnx",
"reward": 3.841100888867532,
"creation_time": 1747727247.4340084,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1599971.pt"
]
},
{
"steps": 1799449,
"file_path": "results/Huggy2/Huggy/Huggy-1799449.onnx",
"reward": 3.3567433684736816,
"creation_time": 1747727504.3416781,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1799449.pt"
]
},
{
"steps": 1999991,
"file_path": "results/Huggy2/Huggy/Huggy-1999991.onnx",
"reward": 3.5822052538394926,
"creation_time": 1747727763.237161,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1999991.pt"
]
},
{
"steps": 2000055,
"file_path": "results/Huggy2/Huggy/Huggy-2000055.onnx",
"reward": 3.616616368293762,
"creation_time": 1747727763.3733423,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-2000055.pt"
]
}
],
"final_checkpoint": {
"steps": 2000055,
"file_path": "results/Huggy2/Huggy.onnx",
"reward": 3.616616368293762,
"creation_time": 1747727763.3733423,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-2000055.pt"
]
}
},
"metadata": {
"stats_format_version": "0.3.0",
"mlagents_version": "1.2.0.dev0",
"torch_version": "2.7.0+cu126"
}
}