ppo-Huggy/run_logs/training_status.json
{
    "Huggy": {
        "checkpoints": [
            {
                "steps": 199801,
                "file_path": "results/Huggy/Huggy/Huggy-199801.onnx",
                "reward": 3.1289754732331234,
                "creation_time": 1686496729.2803233,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-199801.pt"
                ]
            },
            {
                "steps": 399995,
                "file_path": "results/Huggy/Huggy/Huggy-399995.onnx",
                "reward": 3.7724451402138017,
                "creation_time": 1686496971.863093,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-399995.pt"
                ]
            },
            {
                "steps": 599993,
                "file_path": "results/Huggy/Huggy/Huggy-599993.onnx",
                "reward": 3.495355463027954,
                "creation_time": 1686497211.4498072,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-599993.pt"
                ]
            },
            {
                "steps": 799895,
                "file_path": "results/Huggy/Huggy/Huggy-799895.onnx",
                "reward": 3.8679686583710846,
                "creation_time": 1686497439.597908,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-799895.pt"
                ]
            },
            {
                "steps": 999960,
                "file_path": "results/Huggy/Huggy/Huggy-999960.onnx",
                "reward": 3.530685831199993,
                "creation_time": 1686497674.0026958,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-999960.pt"
                ]
            },
            {
                "steps": 1199918,
                "file_path": "results/Huggy/Huggy/Huggy-1199918.onnx",
                "reward": 3.596474716880105,
                "creation_time": 1686497912.9345763,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-1199918.pt"
                ]
            },
            {
                "steps": 1399939,
                "file_path": "results/Huggy/Huggy/Huggy-1399939.onnx",
                "reward": 3.794469819079697,
                "creation_time": 1686498148.9738975,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-1399939.pt"
                ]
            },
            {
                "steps": 1599963,
                "file_path": "results/Huggy/Huggy/Huggy-1599963.onnx",
                "reward": 3.7026109832028546,
                "creation_time": 1686498388.3407547,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-1599963.pt"
                ]
            },
            {
                "steps": 1799974,
                "file_path": "results/Huggy/Huggy/Huggy-1799974.onnx",
                "reward": 3.5158856651756203,
                "creation_time": 1686498628.618612,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-1799974.pt"
                ]
            },
            {
                "steps": 1999327,
                "file_path": "results/Huggy/Huggy/Huggy-1999327.onnx",
                "reward": 2.9426310194863214,
                "creation_time": 1686498875.5765495,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-1999327.pt"
                ]
            },
            {
                "steps": 2000077,
                "file_path": "results/Huggy/Huggy/Huggy-2000077.onnx",
                "reward": 2.3508500933647154,
                "creation_time": 1686498875.7175856,
                "auxillary_file_paths": [
                    "results/Huggy/Huggy/Huggy-2000077.pt"
                ]
            }
        ],
        "final_checkpoint": {
            "steps": 2000077,
            "file_path": "results/Huggy/Huggy.onnx",
            "reward": 2.3508500933647154,
            "creation_time": 1686498875.7175856,
            "auxillary_file_paths": [
                "results/Huggy/Huggy/Huggy-2000077.pt"
            ]
        }
    },
    "metadata": {
        "stats_format_version": "0.3.0",
        "mlagents_version": "0.31.0.dev0",
        "torch_version": "1.11.0+cu102"
    }
}
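
A minimal Python sketch, not part of the log itself, showing one way to inspect this file: load the checkpoint list and report the checkpoint with the highest recorded mean reward. The relative path below assumes this repository's layout (run_logs/training_status.json); note that the key "auxillary_file_paths" is spelled this way in the file itself.

    # Sketch: find the best-scoring checkpoint in an ML-Agents
    # training_status.json. Assumes the file path used below.
    import json

    with open("run_logs/training_status.json") as f:
        status = json.load(f)

    checkpoints = status["Huggy"]["checkpoints"]

    # The final checkpoint is not necessarily the best one: here the
    # last reward (~2.35) is well below the ~3.87 peak at step 799895.
    best = max(checkpoints, key=lambda c: c["reward"])
    print(f"best checkpoint: step {best['steps']}, reward {best['reward']:.3f}")
    print(f"onnx file: {best['file_path']}")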