{
"Huggy": {
"checkpoints": [
{
"steps": 199872,
"file_path": "results/Huggy2/Huggy/Huggy-199872.onnx",
"reward": 3.368105536326766,
"creation_time": 1719600545.6003935,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-199872.pt"
]
},
{
"steps": 399921,
"file_path": "results/Huggy2/Huggy/Huggy-399921.onnx",
"reward": 3.839749637016883,
"creation_time": 1719600782.6337695,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-399921.pt"
]
},
{
"steps": 599960,
"file_path": "results/Huggy2/Huggy/Huggy-599960.onnx",
"reward": 3.773958137402168,
"creation_time": 1719601027.4050765,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-599960.pt"
]
},
{
"steps": 799989,
"file_path": "results/Huggy2/Huggy/Huggy-799989.onnx",
"reward": 3.5714103098925167,
"creation_time": 1719601270.110816,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-799989.pt"
]
},
{
"steps": 999950,
"file_path": "results/Huggy2/Huggy/Huggy-999950.onnx",
"reward": 3.835360863714507,
"creation_time": 1719601513.3637228,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-999950.pt"
]
},
{
"steps": 1199946,
"file_path": "results/Huggy2/Huggy/Huggy-1199946.onnx",
"reward": 3.757324102677797,
"creation_time": 1719601752.844516,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1199946.pt"
]
},
{
"steps": 1399904,
"file_path": "results/Huggy2/Huggy/Huggy-1399904.onnx",
"reward": 4.079121504511152,
"creation_time": 1719601998.0404453,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1399904.pt"
]
},
{
"steps": 1599992,
"file_path": "results/Huggy2/Huggy/Huggy-1599992.onnx",
"reward": 3.7883129024729483,
"creation_time": 1719602241.0650315,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1599992.pt"
]
},
{
"steps": 1799282,
"file_path": "results/Huggy2/Huggy/Huggy-1799282.onnx",
"reward": 4.15218877454176,
"creation_time": 1719602488.9965873,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1799282.pt"
]
},
{
"steps": 1999983,
"file_path": "results/Huggy2/Huggy/Huggy-1999983.onnx",
"reward": 3.895582562469574,
"creation_time": 1719602737.3713782,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1999983.pt"
]
},
{
"steps": 2000043,
"file_path": "results/Huggy2/Huggy/Huggy-2000043.onnx",
"reward": 3.8895335609004613,
"creation_time": 1719602737.4846113,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-2000043.pt"
]
}
],
"final_checkpoint": {
"steps": 2000043,
"file_path": "results/Huggy2/Huggy.onnx",
"reward": 3.8895335609004613,
"creation_time": 1719602737.4846113,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-2000043.pt"
]
}
},
"metadata": {
"stats_format_version": "0.3.0",
"mlagents_version": "1.1.0.dev0",
"torch_version": "2.3.0+cu121"
}
}