{
"Huggy": {
"checkpoints": [
{
"steps": 199984,
"file_path": "results/Huggy2/Huggy/Huggy-199984.onnx",
"reward": 3.3732682308729958,
"creation_time": 1712123854.4855778,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-199984.pt"
]
},
{
"steps": 399990,
"file_path": "results/Huggy2/Huggy/Huggy-399990.onnx",
"reward": 3.5927596879005432,
"creation_time": 1712124094.5673077,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-399990.pt"
]
},
{
"steps": 599829,
"file_path": "results/Huggy2/Huggy/Huggy-599829.onnx",
"reward": 4.2349982261657715,
"creation_time": 1712124336.1639385,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-599829.pt"
]
},
{
"steps": 799956,
"file_path": "results/Huggy2/Huggy/Huggy-799956.onnx",
"reward": 3.835831753823949,
"creation_time": 1712124576.4286358,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-799956.pt"
]
},
{
"steps": 999805,
"file_path": "results/Huggy2/Huggy/Huggy-999805.onnx",
"reward": 4.046388504384947,
"creation_time": 1712124820.7533586,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-999805.pt"
]
},
{
"steps": 1199913,
"file_path": "results/Huggy2/Huggy/Huggy-1199913.onnx",
"reward": 4.086439611450318,
"creation_time": 1712125062.3751488,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1199913.pt"
]
},
{
"steps": 1399994,
"file_path": "results/Huggy2/Huggy/Huggy-1399994.onnx",
"reward": 3.845434794736945,
"creation_time": 1712125303.8188775,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1399994.pt"
]
},
{
"steps": 1599944,
"file_path": "results/Huggy2/Huggy/Huggy-1599944.onnx",
"reward": 3.952255398966372,
"creation_time": 1712125543.5803838,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1599944.pt"
]
},
{
"steps": 1799932,
"file_path": "results/Huggy2/Huggy/Huggy-1799932.onnx",
"reward": 3.4130786934069226,
"creation_time": 1712125782.4467812,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1799932.pt"
]
},
{
"steps": 1999588,
"file_path": "results/Huggy2/Huggy/Huggy-1999588.onnx",
"reward": 3.6834242137165756,
"creation_time": 1712126023.3601258,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1999588.pt"
]
},
{
"steps": 2000338,
"file_path": "results/Huggy2/Huggy/Huggy-2000338.onnx",
"reward": 3.6535560911011804,
"creation_time": 1712126023.5024989,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-2000338.pt"
]
}
],
"final_checkpoint": {
"steps": 2000338,
"file_path": "results/Huggy2/Huggy.onnx",
"reward": 3.6535560911011804,
"creation_time": 1712126023.5024989,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-2000338.pt"
]
}
},
"metadata": {
"stats_format_version": "0.3.0",
"mlagents_version": "1.1.0.dev0",
"torch_version": "2.2.1+cu121"
}
}