{
  "Huggy": {
    "checkpoints": [
      {
        "steps": 199788,
        "file_path": "results/Huggy/Huggy/Huggy-199788.onnx",
        "reward": 3.5809110823799584,
        "creation_time": 1677078832.8017797,
        "auxillary_file_paths": [
          "results/Huggy/Huggy/Huggy-199788.pt"
        ]
      },
      {
        "steps": 399897,
        "file_path": "results/Huggy/Huggy/Huggy-399897.onnx",
        "reward": 3.7756336192562157,
        "creation_time": 1677079069.8836172,
        "auxillary_file_paths": [
          "results/Huggy/Huggy/Huggy-399897.pt"
        ]
      },
      {
        "steps": 599977,
        "file_path": "results/Huggy/Huggy/Huggy-599977.onnx",
        "reward": 4.142357760462268,
        "creation_time": 1677079313.9475064,
        "auxillary_file_paths": [
          "results/Huggy/Huggy/Huggy-599977.pt"
        ]
      },
      {
        "steps": 799989,
        "file_path": "results/Huggy/Huggy/Huggy-799989.onnx",
        "reward": 3.9442763792809528,
        "creation_time": 1677079554.089141,
        "auxillary_file_paths": [
          "results/Huggy/Huggy/Huggy-799989.pt"
        ]
      },
      {
        "steps": 999959,
        "file_path": "results/Huggy/Huggy/Huggy-999959.onnx",
        "reward": 3.8341628698507946,
        "creation_time": 1677079798.0972285,
        "auxillary_file_paths": [
          "results/Huggy/Huggy/Huggy-999959.pt"
        ]
      },
      {
        "steps": 1199991,
        "file_path": "results/Huggy/Huggy/Huggy-1199991.onnx",
        "reward": 3.7698625463086204,
        "creation_time": 1677080041.2005634,
        "auxillary_file_paths": [
          "results/Huggy/Huggy/Huggy-1199991.pt"
        ]
      },
      {
        "steps": 1399964,
        "file_path": "results/Huggy/Huggy/Huggy-1399964.onnx",
        "reward": 3.8723483085632324,
        "creation_time": 1677080286.5828726,
        "auxillary_file_paths": [
          "results/Huggy/Huggy/Huggy-1399964.pt"
        ]
      },
      {
        "steps": 1599976,
        "file_path": "results/Huggy/Huggy/Huggy-1599976.onnx",
        "reward": 3.8993928338068105,
        "creation_time": 1677080528.011398,
        "auxillary_file_paths": [
          "results/Huggy/Huggy/Huggy-1599976.pt"
        ]
      },
      {
        "steps": 1799929,
        "file_path": "results/Huggy/Huggy/Huggy-1799929.onnx",
        "reward": 3.975352759031873,
        "creation_time": 1677080769.0386474,
        "auxillary_file_paths": [
          "results/Huggy/Huggy/Huggy-1799929.pt"
        ]
      },
      {
        "steps": 1999975,
        "file_path": "results/Huggy/Huggy/Huggy-1999975.onnx",
        "reward": 4.120581670885994,
        "creation_time": 1677081003.5085897,
        "auxillary_file_paths": [
          "results/Huggy/Huggy/Huggy-1999975.pt"
        ]
      },
      {
        "steps": 2000085,
        "file_path": "results/Huggy/Huggy/Huggy-2000085.onnx",
        "reward": 4.126196197902455,
        "creation_time": 1677081003.6257405,
        "auxillary_file_paths": [
          "results/Huggy/Huggy/Huggy-2000085.pt"
        ]
      }
    ],
    "final_checkpoint": {
      "steps": 2000085,
      "file_path": "results/Huggy/Huggy.onnx",
      "reward": 4.126196197902455,
      "creation_time": 1677081003.6257405,
      "auxillary_file_paths": [
        "results/Huggy/Huggy/Huggy-2000085.pt"
      ]
    }
  },
  "metadata": {
    "stats_format_version": "0.3.0",
    "mlagents_version": "0.29.0.dev0",
    "torch_version": "1.8.1+cu102"
  }
}