{
"Huggy": {
"checkpoints": [
{
"steps": 199694,
"file_path": "results/Huggy2/Huggy/Huggy-199694.onnx",
"reward": 3.333495828054719,
"creation_time": 1752562619.2389195,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-199694.pt"
]
},
{
"steps": 399920,
"file_path": "results/Huggy2/Huggy/Huggy-399920.onnx",
"reward": 3.309882313777239,
"creation_time": 1752562931.6586657,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-399920.pt"
]
},
{
"steps": 599616,
"file_path": "results/Huggy2/Huggy/Huggy-599616.onnx",
"reward": 3.5876735895872116,
"creation_time": 1752563256.132379,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-599616.pt"
]
},
{
"steps": 799945,
"file_path": "results/Huggy2/Huggy/Huggy-799945.onnx",
"reward": 3.920008843777164,
"creation_time": 1752563568.234538,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-799945.pt"
]
},
{
"steps": 999870,
"file_path": "results/Huggy2/Huggy/Huggy-999870.onnx",
"reward": 3.51735393540694,
"creation_time": 1752563898.796583,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-999870.pt"
]
},
{
"steps": 1199868,
"file_path": "results/Huggy2/Huggy/Huggy-1199868.onnx",
"reward": 3.904013332482931,
"creation_time": 1752564226.8087778,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1199868.pt"
]
},
{
"steps": 1399328,
"file_path": "results/Huggy2/Huggy/Huggy-1399328.onnx",
"reward": 3.9648561508227615,
"creation_time": 1752564550.9487908,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1399328.pt"
]
},
{
"steps": 1599984,
"file_path": "results/Huggy2/Huggy/Huggy-1599984.onnx",
"reward": 3.9130741017205373,
"creation_time": 1752564880.3828504,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1599984.pt"
]
},
{
"steps": 1799970,
"file_path": "results/Huggy2/Huggy/Huggy-1799970.onnx",
"reward": 3.5470425673277983,
"creation_time": 1752565206.9634302,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1799970.pt"
]
},
{
"steps": 1999827,
"file_path": "results/Huggy2/Huggy/Huggy-1999827.onnx",
"reward": 3.386573314666748,
"creation_time": 1752565531.605572,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1999827.pt"
]
},
{
"steps": 2000577,
"file_path": "results/Huggy2/Huggy/Huggy-2000577.onnx",
"reward": 2.9180357615152994,
"creation_time": 1752565531.8120663,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-2000577.pt"
]
}
],
"final_checkpoint": {
"steps": 2000577,
"file_path": "results/Huggy2/Huggy.onnx",
"reward": 2.9180357615152994,
"creation_time": 1752565531.8120663,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-2000577.pt"
]
}
},
"metadata": {
"stats_format_version": "0.3.0",
"mlagents_version": "1.2.0.dev0",
"torch_version": "2.7.1+cu126"
}
}