{
  "Huggy": {
    "checkpoints": [
      {
        "steps": 199898,
        "file_path": "results/Huggy2/Huggy/Huggy-199898.onnx",
        "reward": 3.3787167333066463,
        "creation_time": 1741679187.115186,
        "auxillary_file_paths": [
          "results/Huggy2/Huggy/Huggy-199898.pt"
        ]
      },
      {
        "steps": 399895,
        "file_path": "results/Huggy2/Huggy/Huggy-399895.onnx",
        "reward": 3.6775173698410843,
        "creation_time": 1741679420.7499502,
        "auxillary_file_paths": [
          "results/Huggy2/Huggy/Huggy-399895.pt"
        ]
      },
      {
        "steps": 599870,
        "file_path": "results/Huggy2/Huggy/Huggy-599870.onnx",
        "reward": 4.192452669143677,
        "creation_time": 1741679664.5646818,
        "auxillary_file_paths": [
          "results/Huggy2/Huggy/Huggy-599870.pt"
        ]
      },
      {
        "steps": 799893,
        "file_path": "results/Huggy2/Huggy/Huggy-799893.onnx",
        "reward": 3.6767398026418983,
        "creation_time": 1741679902.4905493,
        "auxillary_file_paths": [
          "results/Huggy2/Huggy/Huggy-799893.pt"
        ]
      },
      {
        "steps": 999914,
        "file_path": "results/Huggy2/Huggy/Huggy-999914.onnx",
        "reward": 3.807242632459063,
        "creation_time": 1741680142.6607409,
        "auxillary_file_paths": [
          "results/Huggy2/Huggy/Huggy-999914.pt"
        ]
      },
      {
        "steps": 1199905,
        "file_path": "results/Huggy2/Huggy/Huggy-1199905.onnx",
        "reward": 3.8625377091494473,
        "creation_time": 1741680387.177481,
        "auxillary_file_paths": [
          "results/Huggy2/Huggy/Huggy-1199905.pt"
        ]
      },
      {
        "steps": 1399882,
        "file_path": "results/Huggy2/Huggy/Huggy-1399882.onnx",
        "reward": 3.4698600624457443,
        "creation_time": 1741680624.1891603,
        "auxillary_file_paths": [
          "results/Huggy2/Huggy/Huggy-1399882.pt"
        ]
      },
      {
        "steps": 1599968,
        "file_path": "results/Huggy2/Huggy/Huggy-1599968.onnx",
        "reward": 3.7218624775979054,
        "creation_time": 1741680863.8816483,
        "auxillary_file_paths": [
          "results/Huggy2/Huggy/Huggy-1599968.pt"
        ]
      },
      {
        "steps": 1799965,
        "file_path": "results/Huggy2/Huggy/Huggy-1799965.onnx",
        "reward": 3.7323270197267884,
        "creation_time": 1741681104.2017016,
        "auxillary_file_paths": [
          "results/Huggy2/Huggy/Huggy-1799965.pt"
        ]
      },
      {
        "steps": 1999981,
        "file_path": "results/Huggy2/Huggy/Huggy-1999981.onnx",
        "reward": 3.8115033624794683,
        "creation_time": 1741681343.6730306,
        "auxillary_file_paths": [
          "results/Huggy2/Huggy/Huggy-1999981.pt"
        ]
      },
      {
        "steps": 2000041,
        "file_path": "results/Huggy2/Huggy/Huggy-2000041.onnx",
        "reward": 3.8072394474464306,
        "creation_time": 1741681343.7834685,
        "auxillary_file_paths": [
          "results/Huggy2/Huggy/Huggy-2000041.pt"
        ]
      }
    ],
    "final_checkpoint": {
      "steps": 2000041,
      "file_path": "results/Huggy2/Huggy.onnx",
      "reward": 3.8072394474464306,
      "creation_time": 1741681343.7834685,
      "auxillary_file_paths": [
        "results/Huggy2/Huggy/Huggy-2000041.pt"
      ]
    }
  },
  "metadata": {
    "stats_format_version": "0.3.0",
    "mlagents_version": "1.2.0.dev0",
    "torch_version": "2.6.0+cu124"
  }
}