{
"Huggy": {
"checkpoints": [
{
"steps": 199911,
"file_path": "results/Huggy2/Huggy/Huggy-199911.onnx",
"reward": 3.6775682559380165,
"creation_time": 1719583483.0610626,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-199911.pt"
]
},
{
"steps": 399918,
"file_path": "results/Huggy2/Huggy/Huggy-399918.onnx",
"reward": 3.5129582047462464,
"creation_time": 1719583724.0909336,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-399918.pt"
]
},
{
"steps": 599985,
"file_path": "results/Huggy2/Huggy/Huggy-599985.onnx",
"reward": 3.915606200695038,
"creation_time": 1719583971.5688322,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-599985.pt"
]
},
{
"steps": 799959,
"file_path": "results/Huggy2/Huggy/Huggy-799959.onnx",
"reward": 3.605077318169854,
"creation_time": 1719584217.8247075,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-799959.pt"
]
},
{
"steps": 999261,
"file_path": "results/Huggy2/Huggy/Huggy-999261.onnx",
"reward": 3.8389494399062727,
"creation_time": 1719584463.1178584,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-999261.pt"
]
},
{
"steps": 1199991,
"file_path": "results/Huggy2/Huggy/Huggy-1199991.onnx",
"reward": 3.985802202313035,
"creation_time": 1719584714.1925657,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1199991.pt"
]
},
{
"steps": 1399879,
"file_path": "results/Huggy2/Huggy/Huggy-1399879.onnx",
"reward": 3.789701476241603,
"creation_time": 1719584955.7011771,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1399879.pt"
]
},
{
"steps": 1599997,
"file_path": "results/Huggy2/Huggy/Huggy-1599997.onnx",
"reward": 3.7522229461958916,
"creation_time": 1719585203.232977,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1599997.pt"
]
},
{
"steps": 1799880,
"file_path": "results/Huggy2/Huggy/Huggy-1799880.onnx",
"reward": 3.448214496596385,
"creation_time": 1719585447.250855,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1799880.pt"
]
},
{
"steps": 1999995,
"file_path": "results/Huggy2/Huggy/Huggy-1999995.onnx",
"reward": 5.159111499786377,
"creation_time": 1719585698.068071,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1999995.pt"
]
},
{
"steps": 2000107,
"file_path": "results/Huggy2/Huggy/Huggy-2000107.onnx",
"reward": 5.7943010330200195,
"creation_time": 1719585698.2430549,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-2000107.pt"
]
}
],
"final_checkpoint": {
"steps": 2000107,
"file_path": "results/Huggy2/Huggy.onnx",
"reward": 5.7943010330200195,
"creation_time": 1719585698.2430549,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-2000107.pt"
]
}
},
"metadata": {
"stats_format_version": "0.3.0",
"mlagents_version": "1.1.0.dev0",
"torch_version": "2.3.0+cu121"
}
}