{
"Huggy": {
"checkpoints": [
{
"steps": 199894,
"file_path": "results/Huggy/Huggy/Huggy-199894.onnx",
"reward": 3.235453750536992,
"creation_time": 1690896055.588689,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-199894.pt"
]
},
{
"steps": 399790,
"file_path": "results/Huggy/Huggy/Huggy-399790.onnx",
"reward": 3.7935135050823816,
"creation_time": 1690896334.479394,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-399790.pt"
]
},
{
"steps": 599962,
"file_path": "results/Huggy/Huggy/Huggy-599962.onnx",
"reward": 4.237364808718364,
"creation_time": 1690896618.500203,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-599962.pt"
]
},
{
"steps": 799763,
"file_path": "results/Huggy/Huggy/Huggy-799763.onnx",
"reward": 3.986721542459973,
"creation_time": 1690896898.9706779,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-799763.pt"
]
},
{
"steps": 999934,
"file_path": "results/Huggy/Huggy/Huggy-999934.onnx",
"reward": 4.0063259863271945,
"creation_time": 1690897179.9473944,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-999934.pt"
]
},
{
"steps": 1199969,
"file_path": "results/Huggy/Huggy/Huggy-1199969.onnx",
"reward": 4.114591569736086,
"creation_time": 1690897461.9563093,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1199969.pt"
]
},
{
"steps": 1399991,
"file_path": "results/Huggy/Huggy/Huggy-1399991.onnx",
"reward": 3.781090259839947,
"creation_time": 1690897735.0025938,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1399991.pt"
]
},
{
"steps": 1599930,
"file_path": "results/Huggy/Huggy/Huggy-1599930.onnx",
"reward": 3.8228242860423576,
"creation_time": 1690898009.7804036,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1599930.pt"
]
},
{
"steps": 1799997,
"file_path": "results/Huggy/Huggy/Huggy-1799997.onnx",
"reward": 3.6568886122920294,
"creation_time": 1690898290.8860297,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1799997.pt"
]
},
{
"steps": 1999975,
"file_path": "results/Huggy/Huggy/Huggy-1999975.onnx",
"reward": 3.792026400566101,
"creation_time": 1690898578.5937028,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1999975.pt"
]
},
{
"steps": 2000088,
"file_path": "results/Huggy/Huggy/Huggy-2000088.onnx",
"reward": 4.09942549925584,
"creation_time": 1690898578.7363188,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-2000088.pt"
]
}
],
"final_checkpoint": {
"steps": 2000088,
"file_path": "results/Huggy/Huggy.onnx",
"reward": 4.09942549925584,
"creation_time": 1690898578.7363188,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-2000088.pt"
]
}
},
"metadata": {
"stats_format_version": "0.3.0",
"mlagents_version": "0.31.0.dev0",
"torch_version": "1.11.0+cu102"
}
}