{
"Huggy": {
"checkpoints": [
{
"steps": 199719,
"file_path": "results/Huggy2/Huggy/Huggy-199719.onnx",
"reward": 3.337057386383866,
"creation_time": 1720198921.6023202,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-199719.pt"
]
},
{
"steps": 399882,
"file_path": "results/Huggy2/Huggy/Huggy-399882.onnx",
"reward": 3.412763239787175,
"creation_time": 1720199173.2654235,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-399882.pt"
]
},
{
"steps": 599964,
"file_path": "results/Huggy2/Huggy/Huggy-599964.onnx",
"reward": 3.6645776884896413,
"creation_time": 1720199433.1945252,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-599964.pt"
]
},
{
"steps": 799972,
"file_path": "results/Huggy2/Huggy/Huggy-799972.onnx",
"reward": 3.7313395229268207,
"creation_time": 1720199684.6120203,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-799972.pt"
]
},
{
"steps": 999998,
"file_path": "results/Huggy2/Huggy/Huggy-999998.onnx",
"reward": 3.881112504998843,
"creation_time": 1720199945.1084616,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-999998.pt"
]
},
{
"steps": 1199918,
"file_path": "results/Huggy2/Huggy/Huggy-1199918.onnx",
"reward": 3.6950830367146708,
"creation_time": 1720200204.7212374,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1199918.pt"
]
},
{
"steps": 1399900,
"file_path": "results/Huggy2/Huggy/Huggy-1399900.onnx",
"reward": 3.752249143529667,
"creation_time": 1720200453.860903,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1399900.pt"
]
},
{
"steps": 1599884,
"file_path": "results/Huggy2/Huggy/Huggy-1599884.onnx",
"reward": 3.8552374247142245,
"creation_time": 1720200712.421676,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1599884.pt"
]
},
{
"steps": 1799935,
"file_path": "results/Huggy2/Huggy/Huggy-1799935.onnx",
"reward": 3.765445627272129,
"creation_time": 1720200973.804804,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1799935.pt"
]
},
{
"steps": 1999994,
"file_path": "results/Huggy2/Huggy/Huggy-1999994.onnx",
"reward": 3.2569069862365723,
"creation_time": 1720201234.1338809,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-1999994.pt"
]
},
{
"steps": 2000034,
"file_path": "results/Huggy2/Huggy/Huggy-2000034.onnx",
"reward": 3.2166526913642883,
"creation_time": 1720201234.3165092,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-2000034.pt"
]
}
],
"final_checkpoint": {
"steps": 2000034,
"file_path": "results/Huggy2/Huggy.onnx",
"reward": 3.2166526913642883,
"creation_time": 1720201234.3165092,
"auxillary_file_paths": [
"results/Huggy2/Huggy/Huggy-2000034.pt"
]
}
},
"metadata": {
"stats_format_version": "0.3.0",
"mlagents_version": "1.1.0.dev0",
"torch_version": "2.3.0+cu121"
}
}