ppo-Huggy_2.0 / run_logs / training_status.json
{
    "Huggy": {
        "checkpoints": [
            {
                "steps": 199833,
                "file_path": "results/Huggy_2.0/Huggy/Huggy-199833.onnx",
                "reward": 3.1560795277845664,
                "creation_time": 1699603268.3771865,
                "auxillary_file_paths": [
                    "results/Huggy_2.0/Huggy/Huggy-199833.pt"
                ]
            },
            {
                "steps": 399836,
                "file_path": "results/Huggy_2.0/Huggy/Huggy-399836.onnx",
                "reward": 3.926434960630205,
                "creation_time": 1699603498.0701616,
                "auxillary_file_paths": [
                    "results/Huggy_2.0/Huggy/Huggy-399836.pt"
                ]
            },
            {
                "steps": 599991,
                "file_path": "results/Huggy_2.0/Huggy/Huggy-599991.onnx",
                "reward": 3.9020688037077584,
                "creation_time": 1699603729.5813942,
                "auxillary_file_paths": [
                    "results/Huggy_2.0/Huggy/Huggy-599991.pt"
                ]
            },
            {
                "steps": 799911,
                "file_path": "results/Huggy_2.0/Huggy/Huggy-799911.onnx",
                "reward": 3.7277047741198968,
                "creation_time": 1699603960.4877343,
                "auxillary_file_paths": [
                    "results/Huggy_2.0/Huggy/Huggy-799911.pt"
                ]
            },
            {
                "steps": 999953,
                "file_path": "results/Huggy_2.0/Huggy/Huggy-999953.onnx",
                "reward": 4.144810387362605,
                "creation_time": 1699604196.5129933,
                "auxillary_file_paths": [
                    "results/Huggy_2.0/Huggy/Huggy-999953.pt"
                ]
            },
            {
                "steps": 1199971,
                "file_path": "results/Huggy_2.0/Huggy/Huggy-1199971.onnx",
                "reward": 3.7065511503640343,
                "creation_time": 1699604433.2853122,
                "auxillary_file_paths": [
                    "results/Huggy_2.0/Huggy/Huggy-1199971.pt"
                ]
            },
            {
                "steps": 1399982,
                "file_path": "results/Huggy_2.0/Huggy/Huggy-1399982.onnx",
                "reward": 3.4460705518722534,
                "creation_time": 1699604670.2959251,
                "auxillary_file_paths": [
                    "results/Huggy_2.0/Huggy/Huggy-1399982.pt"
                ]
            },
            {
                "steps": 1599870,
                "file_path": "results/Huggy_2.0/Huggy/Huggy-1599870.onnx",
                "reward": 3.8942955817495073,
                "creation_time": 1699604904.0411239,
                "auxillary_file_paths": [
                    "results/Huggy_2.0/Huggy/Huggy-1599870.pt"
                ]
            },
            {
                "steps": 1799995,
                "file_path": "results/Huggy_2.0/Huggy/Huggy-1799995.onnx",
                "reward": 4.256319040298462,
                "creation_time": 1699605141.5018065,
                "auxillary_file_paths": [
                    "results/Huggy_2.0/Huggy/Huggy-1799995.pt"
                ]
            },
            {
                "steps": 1999986,
                "file_path": "results/Huggy_2.0/Huggy/Huggy-1999986.onnx",
                "reward": 3.8703428191297196,
                "creation_time": 1699605382.1724892,
                "auxillary_file_paths": [
                    "results/Huggy_2.0/Huggy/Huggy-1999986.pt"
                ]
            },
            {
                "steps": 2000010,
                "file_path": "results/Huggy_2.0/Huggy/Huggy-2000010.onnx",
                "reward": 3.8293535933978315,
                "creation_time": 1699605382.2444348,
                "auxillary_file_paths": [
                    "results/Huggy_2.0/Huggy/Huggy-2000010.pt"
                ]
            }
        ],
        "final_checkpoint": {
            "steps": 2000010,
            "file_path": "results/Huggy_2.0/Huggy.onnx",
            "reward": 3.8293535933978315,
            "creation_time": 1699605382.2444348,
            "auxillary_file_paths": [
                "results/Huggy_2.0/Huggy/Huggy-2000010.pt"
            ]
        }
    },
    "metadata": {
        "stats_format_version": "0.3.0",
        "mlagents_version": "1.1.0.dev0",
        "torch_version": "2.1.0+cu118"
    }
}
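
For reference, a minimal sketch of how this file can be consumed, assuming it sits at run_logs/training_status.json as the path above suggests: it loads the JSON with Python's standard library and prints each recorded checkpoint's step count, mean reward, and exported .onnx path. The script is illustrative and is not part of the repository.

```python
# Minimal sketch: summarize the checkpoints recorded in training_status.json.
# The file path below is an assumption based on the repo layout shown above.
import json

with open("run_logs/training_status.json") as f:
    status = json.load(f)

# Each checkpoint entry records the step count at save time, the exported
# .onnx policy, the mean reward, and a companion .pt file (note that
# ML-Agents writes the key as "auxillary_file_paths", misspelling included).
for ckpt in status["Huggy"]["checkpoints"]:
    print(f'{ckpt["steps"]:>8} steps  reward {ckpt["reward"]:.3f}  {ckpt["file_path"]}')

final = status["Huggy"]["final_checkpoint"]
print(f'final model: {final["file_path"]} (reward {final["reward"]:.3f})')
```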