{
  "Huggy": {
    "checkpoints": [
      {
        "steps": 199806,
        "file_path": "results/Huggy/Huggy/Huggy-199806.onnx",
        "reward": 3.2200317402680714,
        "creation_time": 1684414585.1302502,
        "auxillary_file_paths": [
          "results/Huggy/Huggy/Huggy-199806.pt"
        ]
      },
      {
        "steps": 399895,
        "file_path": "results/Huggy/Huggy/Huggy-399895.onnx",
        "reward": 3.8067710838819804,
        "creation_time": 1684414780.3789055,
        "auxillary_file_paths": [
          "results/Huggy/Huggy/Huggy-399895.pt"
        ]
      },
      {
        "steps": 599965,
        "file_path": "results/Huggy/Huggy/Huggy-599965.onnx",
        "reward": 3.9199193954467773,
        "creation_time": 1684414976.601231,
        "auxillary_file_paths": [
          "results/Huggy/Huggy/Huggy-599965.pt"
        ]
      },
      {
        "steps": 799980,
        "file_path": "results/Huggy/Huggy/Huggy-799980.onnx",
        "reward": 3.8510675731707704,
        "creation_time": 1684415174.6150613,
        "auxillary_file_paths": [
          "results/Huggy/Huggy/Huggy-799980.pt"
        ]
      },
      {
        "steps": 999983,
        "file_path": "results/Huggy/Huggy/Huggy-999983.onnx",
        "reward": 3.7399132234870263,
        "creation_time": 1684415372.7711358,
        "auxillary_file_paths": [
          "results/Huggy/Huggy/Huggy-999983.pt"
        ]
      },
      {
        "steps": 1199959,
        "file_path": "results/Huggy/Huggy/Huggy-1199959.onnx",
        "reward": 3.859860549015658,
        "creation_time": 1684415570.117411,
        "auxillary_file_paths": [
          "results/Huggy/Huggy/Huggy-1199959.pt"
        ]
      },
      {
        "steps": 1399920,
        "file_path": "results/Huggy/Huggy/Huggy-1399920.onnx",
        "reward": 3.6864762838681537,
        "creation_time": 1684415766.6833708,
        "auxillary_file_paths": [
          "results/Huggy/Huggy/Huggy-1399920.pt"
        ]
      },
      {
        "steps": 1599905,
        "file_path": "results/Huggy/Huggy/Huggy-1599905.onnx",
        "reward": 3.6725785513240172,
        "creation_time": 1684415965.4227145,
        "auxillary_file_paths": [
          "results/Huggy/Huggy/Huggy-1599905.pt"
        ]
      },
      {
        "steps": 1799949,
        "file_path": "results/Huggy/Huggy/Huggy-1799949.onnx",
        "reward": 3.680830857775233,
        "creation_time": 1684416163.984671,
        "auxillary_file_paths": [
          "results/Huggy/Huggy/Huggy-1799949.pt"
        ]
      },
      {
        "steps": 1999995,
        "file_path": "results/Huggy/Huggy/Huggy-1999995.onnx",
        "reward": 3.889298010516811,
        "creation_time": 1684416362.5175412,
        "auxillary_file_paths": [
          "results/Huggy/Huggy/Huggy-1999995.pt"
        ]
      },
      {
        "steps": 2000089,
        "file_path": "results/Huggy/Huggy/Huggy-2000089.onnx",
        "reward": 3.88859393408424,
        "creation_time": 1684416362.6461694,
        "auxillary_file_paths": [
          "results/Huggy/Huggy/Huggy-2000089.pt"
        ]
      }
    ],
    "final_checkpoint": {
      "steps": 2000089,
      "file_path": "results/Huggy/Huggy.onnx",
      "reward": 3.88859393408424,
      "creation_time": 1684416362.6461694,
      "auxillary_file_paths": [
        "results/Huggy/Huggy/Huggy-2000089.pt"
      ]
    }
  },
  "metadata": {
    "stats_format_version": "0.3.0",
    "mlagents_version": "0.31.0.dev0",
    "torch_version": "1.11.0+cu102"
  }
}