{
"Huggy": {
"checkpoints": [
{
"steps": 199906,
"file_path": "results/Huggy/Huggy/Huggy-199906.onnx",
"reward": 3.402008111359643,
"creation_time": 1697819519.5358858,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-199906.pt"
]
},
{
"steps": 399995,
"file_path": "results/Huggy/Huggy/Huggy-399995.onnx",
"reward": 3.7626939699298045,
"creation_time": 1697819743.3018453,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-399995.pt"
]
},
{
"steps": 599921,
"file_path": "results/Huggy/Huggy/Huggy-599921.onnx",
"reward": 3.436653900146484,
"creation_time": 1697819975.0683146,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-599921.pt"
]
},
{
"steps": 799882,
"file_path": "results/Huggy/Huggy/Huggy-799882.onnx",
"reward": 4.156896003064393,
"creation_time": 1697820205.2486086,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-799882.pt"
]
},
{
"steps": 999828,
"file_path": "results/Huggy/Huggy/Huggy-999828.onnx",
"reward": 3.797376268022642,
"creation_time": 1697820438.010296,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-999828.pt"
]
},
{
"steps": 1199930,
"file_path": "results/Huggy/Huggy/Huggy-1199930.onnx",
"reward": 4.226755126246384,
"creation_time": 1697820675.0799718,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1199930.pt"
]
},
{
"steps": 1399937,
"file_path": "results/Huggy/Huggy/Huggy-1399937.onnx",
"reward": 3.7838187073586416,
"creation_time": 1697820907.9265082,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1399937.pt"
]
},
{
"steps": 1599939,
"file_path": "results/Huggy/Huggy/Huggy-1599939.onnx",
"reward": 4.053974595226225,
"creation_time": 1697821142.8208077,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1599939.pt"
]
},
{
"steps": 1799982,
"file_path": "results/Huggy/Huggy/Huggy-1799982.onnx",
"reward": 3.8651998221317183,
"creation_time": 1697821378.8410435,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1799982.pt"
]
},
{
"steps": 1999992,
"file_path": "results/Huggy/Huggy/Huggy-1999992.onnx",
"reward": 3.731642007827759,
"creation_time": 1697821612.4783444,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1999992.pt"
]
},
{
"steps": 2000078,
"file_path": "results/Huggy/Huggy/Huggy-2000078.onnx",
"reward": 3.7378464887539544,
"creation_time": 1697821612.6253183,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-2000078.pt"
]
}
],
"final_checkpoint": {
"steps": 2000078,
"file_path": "results/Huggy/Huggy.onnx",
"reward": 3.7378464887539544,
"creation_time": 1697821612.6253183,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-2000078.pt"
]
}
},
"metadata": {
"stats_format_version": "0.3.0",
"mlagents_version": "1.1.0.dev0",
"torch_version": "2.1.0+cu118"
}
}