{
"Huggy": {
"checkpoints": [
{
"steps": 199988,
"file_path": "results/Huggy2/Huggy/Huggy-199988.onnx",
"reward": 3.44614149312504,
"creation_time": 1724829845.3544207,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-199988.pt"
]
},
{
"steps": 399859,
"file_path": "results/Huggy2/Huggy/Huggy-399859.onnx",
"reward": 3.7512606341960066,
"creation_time": 1724830084.4293838,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-399859.pt"
]
},
{
"steps": 599994,
"file_path": "results/Huggy2/Huggy/Huggy-599994.onnx",
"reward": 3.9410842886337867,
"creation_time": 1724830327.549394,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-599994.pt"
]
},
{
"steps": 799875,
"file_path": "results/Huggy2/Huggy/Huggy-799875.onnx",
"reward": 3.861460106713431,
"creation_time": 1724830568.9623766,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-799875.pt"
]
},
{
"steps": 999921,
"file_path": "results/Huggy2/Huggy/Huggy-999921.onnx",
"reward": 3.7047892987293047,
"creation_time": 1724830812.329513,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-999921.pt"
]
},
{
"steps": 1199957,
"file_path": "results/Huggy2/Huggy/Huggy-1199957.onnx",
"reward": 3.6350948131537137,
"creation_time": 1724831057.3038979,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1199957.pt"
]
},
{
"steps": 1399909,
"file_path": "results/Huggy2/Huggy/Huggy-1399909.onnx",
"reward": 2.9597804993391037,
"creation_time": 1724831316.645971,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1399909.pt"
]
},
{
"steps": 1599927,
"file_path": "results/Huggy2/Huggy/Huggy-1599927.onnx",
"reward": 4.059667073488235,
"creation_time": 1724831618.4057574,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1599927.pt"
]
},
{
"steps": 1799983,
"file_path": "results/Huggy2/Huggy/Huggy-1799983.onnx",
"reward": 3.6676497450896672,
"creation_time": 1724831933.6903646,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1799983.pt"
]
},
{
"steps": 1999993,
"file_path": "results/Huggy2/Huggy/Huggy-1999993.onnx",
"reward": 3.8054655290419057,
"creation_time": 1724832246.6003938,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1999993.pt"
]
},
{
"steps": 2000077,
"file_path": "results/Huggy2/Huggy/Huggy-2000077.onnx",
"reward": 3.8053039520505876,
"creation_time": 1724832246.726118,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-2000077.pt"
]
}
],
"final_checkpoint": {
"steps": 2000077,
"file_path": "results/Huggy2/Huggy.onnx",
"reward": 3.8053039520505876,
"creation_time": 1724832246.726118,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-2000077.pt"
]
}
},
"metadata": {
"stats_format_version": "0.3.0",
"mlagents_version": "1.1.0.dev0",
"torch_version": "2.4.0+cu121"
}
}