{
  "Huggy": {
    "checkpoints": [
      {
        "steps": 199978,
        "file_path": "results/Huggy2/Huggy/Huggy-199978.onnx",
        "reward": 3.2933790000279743,
        "creation_time": 1737586709.8345876,
        "auxillary_file_paths": [
          "results/Huggy2/Huggy/Huggy-199978.pt"
        ]
      },
      {
        "steps": 399854,
        "file_path": "results/Huggy2/Huggy/Huggy-399854.onnx",
        "reward": 4.176482779364432,
        "creation_time": 1737586913.413488,
        "auxillary_file_paths": [
          "results/Huggy2/Huggy/Huggy-399854.pt"
        ]
      },
      {
        "steps": 599922,
        "file_path": "results/Huggy2/Huggy/Huggy-599922.onnx",
        "reward": 3.1737344563007355,
        "creation_time": 1737587121.4306228,
        "auxillary_file_paths": [
          "results/Huggy2/Huggy/Huggy-599922.pt"
        ]
      },
      {
        "steps": 799927,
        "file_path": "results/Huggy2/Huggy/Huggy-799927.onnx",
        "reward": 4.186344658909132,
        "creation_time": 1737587326.1180978,
        "auxillary_file_paths": [
          "results/Huggy2/Huggy/Huggy-799927.pt"
        ]
      },
      {
        "steps": 999990,
        "file_path": "results/Huggy2/Huggy/Huggy-999990.onnx",
        "reward": 4.02551754818687,
        "creation_time": 1737587537.3526673,
        "auxillary_file_paths": [
          "results/Huggy2/Huggy/Huggy-999990.pt"
        ]
      },
      {
        "steps": 1199995,
        "file_path": "results/Huggy2/Huggy/Huggy-1199995.onnx",
        "reward": 3.8935896117313233,
        "creation_time": 1737587748.3073452,
        "auxillary_file_paths": [
          "results/Huggy2/Huggy/Huggy-1199995.pt"
        ]
      },
      {
        "steps": 1399866,
        "file_path": "results/Huggy2/Huggy/Huggy-1399866.onnx",
        "reward": 3.9919437113262357,
        "creation_time": 1737587959.7888675,
        "auxillary_file_paths": [
          "results/Huggy2/Huggy/Huggy-1399866.pt"
        ]
      },
      {
        "steps": 1599938,
        "file_path": "results/Huggy2/Huggy/Huggy-1599938.onnx",
        "reward": 3.898536809143566,
        "creation_time": 1737588168.901185,
        "auxillary_file_paths": [
          "results/Huggy2/Huggy/Huggy-1599938.pt"
        ]
      },
      {
        "steps": 1799918,
        "file_path": "results/Huggy2/Huggy/Huggy-1799918.onnx",
        "reward": 3.6965121190462793,
        "creation_time": 1737588381.4333951,
        "auxillary_file_paths": [
          "results/Huggy2/Huggy/Huggy-1799918.pt"
        ]
      },
      {
        "steps": 1999977,
        "file_path": "results/Huggy2/Huggy/Huggy-1999977.onnx",
        "reward": 3.8669939523651484,
        "creation_time": 1737588592.803184,
        "auxillary_file_paths": [
          "results/Huggy2/Huggy/Huggy-1999977.pt"
        ]
      },
      {
        "steps": 2000052,
        "file_path": "results/Huggy2/Huggy/Huggy-2000052.onnx",
        "reward": 3.8800784251269174,
        "creation_time": 1737588592.9053698,
        "auxillary_file_paths": [
          "results/Huggy2/Huggy/Huggy-2000052.pt"
        ]
      }
    ],
    "final_checkpoint": {
      "steps": 2000052,
      "file_path": "results/Huggy2/Huggy.onnx",
      "reward": 3.8800784251269174,
      "creation_time": 1737588592.9053698,
      "auxillary_file_paths": [
        "results/Huggy2/Huggy/Huggy-2000052.pt"
      ]
    }
  },
  "metadata": {
    "stats_format_version": "0.3.0",
    "mlagents_version": "1.2.0.dev0",
    "torch_version": "2.5.1+cu121"
  }
}