{
"Huggy": {
"checkpoints": [
{
"steps": 199888,
"file_path": "results/Huggy/Huggy/Huggy-199888.onnx",
"reward": 3.3878886494480196,
"creation_time": 1704648587.9507444,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-199888.pt"
]
},
{
"steps": 399963,
"file_path": "results/Huggy/Huggy/Huggy-399963.onnx",
"reward": 3.8710845449696416,
"creation_time": 1704648831.8819313,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-399963.pt"
]
},
{
"steps": 599981,
"file_path": "results/Huggy/Huggy/Huggy-599981.onnx",
"reward": 4.0150936945624975,
"creation_time": 1704649077.051413,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-599981.pt"
]
},
{
"steps": 799726,
"file_path": "results/Huggy/Huggy/Huggy-799726.onnx",
"reward": 3.9977806376462635,
"creation_time": 1704649321.010892,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-799726.pt"
]
},
{
"steps": 999962,
"file_path": "results/Huggy/Huggy/Huggy-999962.onnx",
"reward": 3.6196800505413727,
"creation_time": 1704649569.6597846,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-999962.pt"
]
},
{
"steps": 1199970,
"file_path": "results/Huggy/Huggy/Huggy-1199970.onnx",
"reward": 3.796338978803383,
"creation_time": 1704649820.426262,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1199970.pt"
]
},
{
"steps": 1399941,
"file_path": "results/Huggy/Huggy/Huggy-1399941.onnx",
"reward": 3.9484074910481772,
"creation_time": 1704650077.8913484,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1399941.pt"
]
},
{
"steps": 1599994,
"file_path": "results/Huggy/Huggy/Huggy-1599994.onnx",
"reward": 3.620493612002805,
"creation_time": 1704650324.1797194,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1599994.pt"
]
},
{
"steps": 1799963,
"file_path": "results/Huggy/Huggy/Huggy-1799963.onnx",
"reward": 3.901753603297973,
"creation_time": 1704650573.144539,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1799963.pt"
]
},
{
"steps": 1999995,
"file_path": "results/Huggy/Huggy/Huggy-1999995.onnx",
"reward": 4.055144325379403,
"creation_time": 1704650819.76364,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-1999995.pt"
]
},
{
"steps": 2000038,
"file_path": "results/Huggy/Huggy/Huggy-2000038.onnx",
"reward": 3.9691467732191086,
"creation_time": 1704650819.879562,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-2000038.pt"
]
}
],
"final_checkpoint": {
"steps": 2000038,
"file_path": "results/Huggy/Huggy.onnx",
"reward": 3.9691467732191086,
"creation_time": 1704650819.879562,
"auxillary_file_paths": [
"results/Huggy/Huggy/Huggy-2000038.pt"
]
}
},
"metadata": {
"stats_format_version": "0.3.0",
"mlagents_version": "1.1.0.dev0",
"torch_version": "2.1.2+cu121"
}
}