{
"Huggy": {
"checkpoints": [
{
"steps": 199941,
"file_path": "results/Huggy/Huggy/Huggy-199941.onnx",
"reward": 3.3157049468585424,
"creation_time": 1677886583.127745,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-199941.pt"
]
},
{
"steps": 399734,
"file_path": "results/Huggy/Huggy/Huggy-399734.onnx",
"reward": 3.5021728721715637,
"creation_time": 1677886841.5119102,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-399734.pt"
]
},
{
"steps": 599969,
"file_path": "results/Huggy/Huggy/Huggy-599969.onnx",
"reward": 3.7629170936086904,
"creation_time": 1677887102.9195075,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-599969.pt"
]
},
{
"steps": 799962,
"file_path": "results/Huggy/Huggy/Huggy-799962.onnx",
"reward": 3.6511452460801728,
"creation_time": 1677887360.7435617,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-799962.pt"
]
},
{
"steps": 999978,
"file_path": "results/Huggy/Huggy/Huggy-999978.onnx",
"reward": 3.6836775783600846,
"creation_time": 1677887623.3484452,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-999978.pt"
]
},
{
"steps": 1199992,
"file_path": "results/Huggy/Huggy/Huggy-1199992.onnx",
"reward": 3.157864673095837,
"creation_time": 1677887884.9178374,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1199992.pt"
]
},
{
"steps": 1399973,
"file_path": "results/Huggy/Huggy/Huggy-1399973.onnx",
"reward": 3.8264989901452044,
"creation_time": 1677888147.2564535,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1399973.pt"
]
},
{
"steps": 1599996,
"file_path": "results/Huggy/Huggy/Huggy-1599996.onnx",
"reward": 3.81105680366945,
"creation_time": 1677888413.6974313,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1599996.pt"
]
},
{
"steps": 1799954,
"file_path": "results/Huggy/Huggy/Huggy-1799954.onnx",
"reward": 3.801396679133177,
"creation_time": 1677888676.4216442,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1799954.pt"
]
},
{
"steps": 1999921,
"file_path": "results/Huggy/Huggy/Huggy-1999921.onnx",
"reward": 4.322614796459675,
"creation_time": 1677888943.0158293,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1999921.pt"
]
},
{
"steps": 2000016,
"file_path": "results/Huggy/Huggy/Huggy-2000016.onnx",
"reward": 4.346885323524475,
"creation_time": 1677888943.1346624,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-2000016.pt"
]
}
],
"final_checkpoint": {
"steps": 2000016,
"file_path": "results/Huggy/Huggy.onnx",
"reward": 4.346885323524475,
"creation_time": 1677888943.1346624,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-2000016.pt"
]
}
},
"metadata": {
"stats_format_version": "0.3.0",
"mlagents_version": "0.29.0.dev0",
"torch_version": "1.8.1+cu102"
}
}