{
"Huggy": {
"checkpoints": [
{
"steps": 199757,
"file_path": "results/Huggy/Huggy/Huggy-199757.onnx",
"reward": 3.2349996505414738,
"creation_time": 1697658755.256649,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-199757.pt"
]
},
{
"steps": 399960,
"file_path": "results/Huggy/Huggy/Huggy-399960.onnx",
"reward": 4.0997795296497035,
"creation_time": 1697658989.0594234,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-399960.pt"
]
},
{
"steps": 599860,
"file_path": "results/Huggy/Huggy/Huggy-599860.onnx",
"reward": 3.7486975975334644,
"creation_time": 1697659225.4606633,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-599860.pt"
]
},
{
"steps": 799343,
"file_path": "results/Huggy/Huggy/Huggy-799343.onnx",
"reward": 3.8000371069350143,
"creation_time": 1697659461.6984684,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-799343.pt"
]
},
{
"steps": 999954,
"file_path": "results/Huggy/Huggy/Huggy-999954.onnx",
"reward": 3.708791895468432,
"creation_time": 1697659706.1392028,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-999954.pt"
]
},
{
"steps": 1199907,
"file_path": "results/Huggy/Huggy/Huggy-1199907.onnx",
"reward": 3.6364431822454772,
"creation_time": 1697659944.486381,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1199907.pt"
]
},
{
"steps": 1399999,
"file_path": "results/Huggy/Huggy/Huggy-1399999.onnx",
"reward": 3.7203771770000458,
"creation_time": 1697660183.4187074,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1399999.pt"
]
},
{
"steps": 1599973,
"file_path": "results/Huggy/Huggy/Huggy-1599973.onnx",
"reward": 3.8565976016449204,
"creation_time": 1697660417.931369,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1599973.pt"
]
},
{
"steps": 1799988,
"file_path": "results/Huggy/Huggy/Huggy-1799988.onnx",
"reward": 3.4816928055551317,
"creation_time": 1697660655.533949,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1799988.pt"
]
},
{
"steps": 1999885,
"file_path": "results/Huggy/Huggy/Huggy-1999885.onnx",
"reward": 3.225120323896408,
"creation_time": 1697660894.1459982,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1999885.pt"
]
},
{
"steps": 2000731,
"file_path": "results/Huggy/Huggy/Huggy-2000731.onnx",
"reward": 2.9443090124563738,
"creation_time": 1697660894.2818973,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-2000731.pt"
]
}
],
"final_checkpoint": {
"steps": 2000731,
"file_path": "results/Huggy/Huggy.onnx",
"reward": 2.9443090124563738,
"creation_time": 1697660894.2818973,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-2000731.pt"
]
}
},
"metadata": {
"stats_format_version": "0.3.0",
"mlagents_version": "1.1.0.dev0",
"torch_version": "2.0.1+cu118"
}
}