{
"Huggy": {
"checkpoints": [
{
"steps": 199948,
"file_path": "results/Huggy/Huggy/Huggy-199948.onnx",
"reward": 3.6382277844583286,
"creation_time": 1688367416.9397001,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-199948.pt"
]
},
{
"steps": 399978,
"file_path": "results/Huggy/Huggy/Huggy-399978.onnx",
"reward": 3.8567474169863596,
"creation_time": 1688367646.3667536,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-399978.pt"
]
},
{
"steps": 599947,
"file_path": "results/Huggy/Huggy/Huggy-599947.onnx",
"reward": 3.8904042291641234,
"creation_time": 1688367876.220682,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-599947.pt"
]
},
{
"steps": 799964,
"file_path": "results/Huggy/Huggy/Huggy-799964.onnx",
"reward": 3.8402582843446993,
"creation_time": 1688368105.1710792,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-799964.pt"
]
},
{
"steps": 999973,
"file_path": "results/Huggy/Huggy/Huggy-999973.onnx",
"reward": 3.800571551473122,
"creation_time": 1688368337.2101033,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-999973.pt"
]
},
{
"steps": 1199791,
"file_path": "results/Huggy/Huggy/Huggy-1199791.onnx",
"reward": 3.6130282360574473,
"creation_time": 1688368570.3906558,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1199791.pt"
]
},
{
"steps": 1399940,
"file_path": "results/Huggy/Huggy/Huggy-1399940.onnx",
"reward": 3.473895256962281,
"creation_time": 1688368797.2090838,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1399940.pt"
]
},
{
"steps": 1599927,
"file_path": "results/Huggy/Huggy/Huggy-1599927.onnx",
"reward": 3.5353238609500397,
"creation_time": 1688369029.8587751,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1599927.pt"
]
},
{
"steps": 1799553,
"file_path": "results/Huggy/Huggy/Huggy-1799553.onnx",
"reward": 3.5656461434231863,
"creation_time": 1688369262.3147252,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1799553.pt"
]
},
{
"steps": 1999995,
"file_path": "results/Huggy/Huggy/Huggy-1999995.onnx",
"reward": 0.6720938682556152,
"creation_time": 1688369492.338894,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1999995.pt"
]
},
{
"steps": 2000099,
"file_path": "results/Huggy/Huggy/Huggy-2000099.onnx",
"reward": 2.711174964904785,
"creation_time": 1688369492.4659402,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-2000099.pt"
]
}
],
"final_checkpoint": {
"steps": 2000099,
"file_path": "results/Huggy/Huggy.onnx",
"reward": 2.711174964904785,
"creation_time": 1688369492.4659402,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-2000099.pt"
]
}
},
"metadata": {
"stats_format_version": "0.3.0",
"mlagents_version": "0.31.0.dev0",
"torch_version": "1.11.0+cu102"
}
}