ppo-Huggy/run_logs/training_status.json
{
    "Huggy": {
        "checkpoints": [
            {
                "steps": 199944,
                "file_path": "results/Huggy/Huggy/Huggy-199944.onnx",
                "reward": 3.447901689781333,
                "creation_time": 1681808281.6569905,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-199944.pt"
                ]
            },
            {
                "steps": 399978,
                "file_path": "results/Huggy/Huggy/Huggy-399978.onnx",
                "reward": 4.15223652869463,
                "creation_time": 1681808547.0931034,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-399978.pt"
                ]
            },
            {
                "steps": 599990,
                "file_path": "results/Huggy/Huggy/Huggy-599990.onnx",
                "reward": 3.337475856145223,
                "creation_time": 1681808814.7028155,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-599990.pt"
                ]
            },
            {
                "steps": 799980,
                "file_path": "results/Huggy/Huggy/Huggy-799980.onnx",
                "reward": 3.7454962228784465,
                "creation_time": 1681809081.8807156,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-799980.pt"
                ]
            },
            {
                "steps": 999961,
                "file_path": "results/Huggy/Huggy/Huggy-999961.onnx",
                "reward": 3.9965416762758705,
                "creation_time": 1681809353.2130153,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-999961.pt"
                ]
            },
            {
                "steps": 1199911,
                "file_path": "results/Huggy/Huggy/Huggy-1199911.onnx",
                "reward": 4.120620751380921,
                "creation_time": 1681809618.9108293,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-1199911.pt"
                ]
            },
            {
                "steps": 1399932,
                "file_path": "results/Huggy/Huggy/Huggy-1399932.onnx",
                "reward": 3.9981862157583237,
                "creation_time": 1681809888.0066094,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-1399932.pt"
                ]
            },
            {
                "steps": 1599960,
                "file_path": "results/Huggy/Huggy/Huggy-1599960.onnx",
                "reward": 3.8625275709412317,
                "creation_time": 1681810150.031134,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-1599960.pt"
                ]
            },
            {
                "steps": 1799933,
                "file_path": "results/Huggy/Huggy/Huggy-1799933.onnx",
                "reward": 3.8848369320233664,
                "creation_time": 1681810420.4512286,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-1799933.pt"
                ]
            },
            {
                "steps": 1999931,
                "file_path": "results/Huggy/Huggy/Huggy-1999931.onnx",
                "reward": 3.626306427376611,
                "creation_time": 1681810689.1401737,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-1999931.pt"
                ]
            },
            {
                "steps": 2000001,
                "file_path": "results/Huggy/Huggy/Huggy-2000001.onnx",
                "reward": 3.644109705038238,
                "creation_time": 1681810689.2788727,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-2000001.pt"
                ]
            }
        ],
        "final_checkpoint": {
            "steps": 2000001,
            "file_path": "results/Huggy/Huggy.onnx",
            "reward": 3.644109705038238,
            "creation_time": 1681810689.2788727,
            "auxillary_file_paths": [
                "results/Huggy/Huggy/Huggy-2000001.pt"
            ]
        }
    },
    "metadata": {
        "stats_format_version": "0.3.0",
        "mlagents_version": "0.31.0.dev0",
        "torch_version": "1.11.0+cu102"
    }
}
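
For reference, a minimal sketch of inspecting this file with Python's standard library, assuming it is read from run_logs/training_status.json as in the repo path above. The keys used ("Huggy", "checkpoints", "final_checkpoint", "steps", "reward", "file_path", "creation_time") are exactly those visible in the JSON; creation_time is assumed to be a Unix epoch timestamp in seconds.

import json
from datetime import datetime, timezone

# Load the ML-Agents training status file shown above.
with open("run_logs/training_status.json") as f:
    status = json.load(f)

# The behavior name ("Huggy") maps to its checkpoint history plus a
# final_checkpoint entry; "metadata" records the writer versions.
huggy = status["Huggy"]

for ckpt in huggy["checkpoints"]:
    # creation_time is interpreted here as Unix epoch seconds (UTC).
    when = datetime.fromtimestamp(ckpt["creation_time"], tz=timezone.utc)
    print(f'{ckpt["steps"]:>8} steps  reward {ckpt["reward"]:.3f}  {when:%Y-%m-%d %H:%M:%S} UTC')

final = huggy["final_checkpoint"]
print("final model:", final["file_path"], "reward", round(final["reward"], 3))

The script needs nothing beyond the standard library and only reads keys that appear in the JSON above.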