ppo-Huggy/run_logs/training_status.json
{
    "Huggy": {
        "checkpoints": [
            {
                "steps": 199900,
                "file_path": "results/Huggy/Huggy/Huggy-199900.onnx",
                "reward": 3.51115761819433,
                "creation_time": 1672220022.7495725,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-199900.pt"
                ]
            },
            {
                "steps": 399960,
                "file_path": "results/Huggy/Huggy/Huggy-399960.onnx",
                "reward": 3.215156636919294,
                "creation_time": 1672220245.6896145,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-399960.pt"
                ]
            },
            {
                "steps": 599944,
                "file_path": "results/Huggy/Huggy/Huggy-599944.onnx",
                "reward": 3.230553773733286,
                "creation_time": 1672220468.164289,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-599944.pt"
                ]
            },
            {
                "steps": 799989,
                "file_path": "results/Huggy/Huggy/Huggy-799989.onnx",
                "reward": 3.786407832280699,
                "creation_time": 1672220691.051206,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-799989.pt"
                ]
            },
            {
                "steps": 999974,
                "file_path": "results/Huggy/Huggy/Huggy-999974.onnx",
                "reward": 3.312188290414356,
                "creation_time": 1672220918.408911,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-999974.pt"
                ]
            },
            {
                "steps": 1199881,
                "file_path": "results/Huggy/Huggy/Huggy-1199881.onnx",
                "reward": 3.8033661408857866,
                "creation_time": 1672221145.559458,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-1199881.pt"
                ]
            },
            {
                "steps": 1399989,
                "file_path": "results/Huggy/Huggy/Huggy-1399989.onnx",
                "reward": 3.8317748551637356,
                "creation_time": 1672221370.3940353,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-1399989.pt"
                ]
            },
            {
                "steps": 1599951,
                "file_path": "results/Huggy/Huggy/Huggy-1599951.onnx",
                "reward": 3.659264494035695,
                "creation_time": 1672221594.7456667,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-1599951.pt"
                ]
            },
            {
                "steps": 1799959,
                "file_path": "results/Huggy/Huggy/Huggy-1799959.onnx",
                "reward": 3.8999619463394426,
                "creation_time": 1672221823.1509721,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-1799959.pt"
                ]
            },
            {
                "steps": 1999942,
                "file_path": "results/Huggy/Huggy/Huggy-1999942.onnx",
                "reward": 3.582419741153717,
                "creation_time": 1672222051.198077,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-1999942.pt"
                ]
            },
            {
                "steps": 2000010,
                "file_path": "results/Huggy/Huggy/Huggy-2000010.onnx",
                "reward": 3.451995166865262,
                "creation_time": 1672222051.3293896,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-2000010.pt"
                ]
            }
        ],
        "final_checkpoint": {
            "steps": 2000010,
            "file_path": "results/Huggy/Huggy.onnx",
            "reward": 3.451995166865262,
            "creation_time": 1672222051.3293896,
            "auxillary_file_paths": [
                "results/Huggy/Huggy/Huggy-2000010.pt"
            ]
        }
    },
    "metadata": {
        "stats_format_version": "0.3.0",
        "mlagents_version": "0.29.0.dev0",
        "torch_version": "1.8.1+cu102"
    }
}