{
"Huggy": {
"checkpoints": [
{
"steps": 199982,
"file_path": "results/Huggy/Huggy/Huggy-199982.onnx",
"reward": 3.4506173359936683,
"creation_time": 1743073884.9502547,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-199982.pt"
]
},
{
"steps": 399775,
"file_path": "results/Huggy/Huggy/Huggy-399775.onnx",
"reward": 4.016834703274071,
"creation_time": 1743074122.6395557,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-399775.pt"
]
},
{
"steps": 599983,
"file_path": "results/Huggy/Huggy/Huggy-599983.onnx",
"reward": 4.12455060975305,
"creation_time": 1743074368.1026351,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-599983.pt"
]
},
{
"steps": 799904,
"file_path": "results/Huggy/Huggy/Huggy-799904.onnx",
"reward": 3.7327653746756297,
"creation_time": 1743074608.8669446,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-799904.pt"
]
},
{
"steps": 999999,
"file_path": "results/Huggy/Huggy/Huggy-999999.onnx",
"reward": 4.086517466614578,
"creation_time": 1743074856.7356198,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-999999.pt"
]
},
{
"steps": 1199788,
"file_path": "results/Huggy/Huggy/Huggy-1199788.onnx",
"reward": 3.6126629704296,
"creation_time": 1743075099.08253,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1199788.pt"
]
},
{
"steps": 1399994,
"file_path": "results/Huggy/Huggy/Huggy-1399994.onnx",
"reward": 3.843782908776227,
"creation_time": 1743075344.5857954,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1399994.pt"
]
},
{
"steps": 1599960,
"file_path": "results/Huggy/Huggy/Huggy-1599960.onnx",
"reward": 3.7567160373510315,
"creation_time": 1743075588.6538763,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1599960.pt"
]
},
{
"steps": 1799999,
"file_path": "results/Huggy/Huggy/Huggy-1799999.onnx",
"reward": 3.589202346237561,
"creation_time": 1743075831.3522537,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1799999.pt"
]
},
{
"steps": 1999937,
"file_path": "results/Huggy/Huggy/Huggy-1999937.onnx",
"reward": 3.5534467577934263,
"creation_time": 1743076075.9106731,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1999937.pt"
]
},
{
"steps": 2000022,
"file_path": "results/Huggy/Huggy/Huggy-2000022.onnx",
"reward": 3.6031883518870282,
"creation_time": 1743076076.037309,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-2000022.pt"
]
}
],
"final_checkpoint": {
"steps": 2000022,
"file_path": "results/Huggy/Huggy.onnx",
"reward": 3.6031883518870282,
"creation_time": 1743076076.037309,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-2000022.pt"
]
}
},
"metadata": {
"stats_format_version": "0.3.0",
"mlagents_version": "1.1.0",
"torch_version": "2.6.0+cu124"
}
}