{
"Huggy": {
"checkpoints": [
{
"steps": 199779,
"file_path": "results/Huggy/Huggy/Huggy-199779.onnx",
"reward": 3.5327331054778326,
"creation_time": 1671627038.7122602,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-199779.pt"
]
},
{
"steps": 399889,
"file_path": "results/Huggy/Huggy/Huggy-399889.onnx",
"reward": 3.5718623433472976,
"creation_time": 1671627253.5264096,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-399889.pt"
]
},
{
"steps": 599985,
"file_path": "results/Huggy/Huggy/Huggy-599985.onnx",
"reward": 3.956183339158694,
"creation_time": 1671627470.750697,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-599985.pt"
]
},
{
"steps": 799885,
"file_path": "results/Huggy/Huggy/Huggy-799885.onnx",
"reward": 4.0424524651219444,
"creation_time": 1671627685.5931997,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-799885.pt"
]
},
{
"steps": 999305,
"file_path": "results/Huggy/Huggy/Huggy-999305.onnx",
"reward": 4.027116323821247,
"creation_time": 1671627904.7021432,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-999305.pt"
]
},
{
"steps": 1199979,
"file_path": "results/Huggy/Huggy/Huggy-1199979.onnx",
"reward": 3.4172045345037754,
"creation_time": 1671628123.9833853,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1199979.pt"
]
},
{
"steps": 1399955,
"file_path": "results/Huggy/Huggy/Huggy-1399955.onnx",
"reward": 3.983732071683163,
"creation_time": 1671628341.3573644,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1399955.pt"
]
},
{
"steps": 1599868,
"file_path": "results/Huggy/Huggy/Huggy-1599868.onnx",
"reward": 3.821688108767072,
"creation_time": 1671628560.2133408,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1599868.pt"
]
},
{
"steps": 1799945,
"file_path": "results/Huggy/Huggy/Huggy-1799945.onnx",
"reward": 3.962141649723053,
"creation_time": 1671628779.5137138,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1799945.pt"
]
},
{
"steps": 1999978,
"file_path": "results/Huggy/Huggy/Huggy-1999978.onnx",
"reward": 3.4494986976896014,
"creation_time": 1671628999.4285822,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1999978.pt"
]
},
{
"steps": 2000030,
"file_path": "results/Huggy/Huggy/Huggy-2000030.onnx",
"reward": 3.405678970946206,
"creation_time": 1671628999.5499525,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-2000030.pt"
]
}
],
"final_checkpoint": {
"steps": 2000030,
"file_path": "results/Huggy/Huggy.onnx",
"reward": 3.405678970946206,
"creation_time": 1671628999.5499525,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-2000030.pt"
]
}
},
"metadata": {
"stats_format_version": "0.3.0",
"mlagents_version": "0.29.0.dev0",
"torch_version": "1.8.1+cu102"
}
}